Commit 7eb9d8f7 authored by Guido Trotter

Parallelize LUQueryInstances

This first version acquires a shared lock on all requested instances and
their nodes. In the future it can be improved by acquiring fewer locks when
no dynamic fields have been requested, and/or by locking just the primary nodes.

Reviewed-by: imsnah
parent d4f6a91c
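
The parallelism comes from Ganeti's shared/exclusive locking: several LUQueryInstances can hold the instance and node locks in shared mode at the same time, while LUs that modify an instance still take them exclusively. The toy reader/writer lock below is only a sketch of that idea, not Ganeti's locking.SharedLock (which also handles lock sets, fairness and deletion); the ToySharedLock class and the query() helper are invented here purely for illustration.

# Illustrative sketch only -- NOT Ganeti code.
import threading


class ToySharedLock(object):
  """Minimal reader/writer lock: shared acquires do not block each other."""

  def __init__(self):
    self._cond = threading.Condition()
    self._shared_holders = 0
    self._exclusive = False

  def acquire(self, shared=False):
    with self._cond:
      if shared:
        # A shared holder only has to wait for an exclusive holder.
        while self._exclusive:
          self._cond.wait()
        self._shared_holders += 1
      else:
        # An exclusive holder waits for everybody else.
        while self._exclusive or self._shared_holders:
          self._cond.wait()
        self._exclusive = True

  def release(self, shared=False):
    with self._cond:
      if shared:
        self._shared_holders -= 1
      else:
        self._exclusive = False
      self._cond.notify_all()


if __name__ == "__main__":
  lock = ToySharedLock()

  def query(name):
    # Corresponds to LUQueryInstances after this commit: shared acquisition,
    # so several queries overlap instead of serializing.
    lock.acquire(shared=True)
    try:
      print("%s holds the instance locks (shared)" % name)
    finally:
      lock.release(shared=True)

  threads = [threading.Thread(target=query, args=("query-%d" % i,))
             for i in range(3)]
  for t in threads:
    t.start()
  for t in threads:
    t.join()

Because all three query threads acquire in shared mode, none of them waits for the others; an exclusive acquire, as a modifying LU would perform, would wait until every shared holder had released the lock.
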
@@ -2452,13 +2452,9 @@ class LUQueryInstances(NoHooksLU):
 
   """
   _OP_REQP = ["output_fields", "names"]
+  REQ_BGL = False
 
-  def CheckPrereq(self):
-    """Check prerequisites.
-
-    This checks that the fields required are valid output fields.
-
-    """
+  def ExpandNames(self):
     self.dynamic_fields = frozenset(["oper_state", "oper_ram", "status"])
     _CheckOutputFields(static=["name", "os", "pnode", "snodes",
                                "admin_state", "admin_ram",
@@ -2467,7 +2463,33 @@ class LUQueryInstances(NoHooksLU):
                        dynamic=self.dynamic_fields,
                        selected=self.op.output_fields)
 
-    self.wanted = _GetWantedInstances(self, self.op.names)
+    self.needed_locks = {}
+    self.share_locks[locking.LEVEL_INSTANCE] = 1
+    self.share_locks[locking.LEVEL_NODE] = 1
+
+    # TODO: we could lock instances (and nodes) only if the user asked for
+    # dynamic fields. For that we need atomic ways to get info for a group of
+    # instances from the config, though.
+    if not self.op.names:
+      self.needed_locks[locking.LEVEL_INSTANCE] = None # Acquire all
+    else:
+      self.needed_locks[locking.LEVEL_INSTANCE] = \
+        _GetWantedInstances(self, self.op.names)
+
+    self.needed_locks[locking.LEVEL_NODE] = []
+    self.recalculate_locks[locking.LEVEL_NODE] = 'replace'
+
+  def DeclareLocks(self, level):
+    # TODO: locking of nodes could be avoided when not querying them
+    if level == locking.LEVEL_NODE:
+      self._LockInstancesNodes()
+
+  def CheckPrereq(self):
+    """Check prerequisites.
+
+    """
+    # This of course is valid only if we locked the instances
+    self.wanted = self.needed_locks[locking.LEVEL_INSTANCE]
 
   def Exec(self, feedback_fn):
     """Computes the list of nodes and their attributes.