diff --git a/Makefile.am b/Makefile.am
index a9ef001c3957643d1ce0d9313890737a74a2fd56..444b53e26df45e6778066ca09ca0f539a527277a 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -9,6 +9,11 @@
 abs_top_builddir = @abs_top_builddir@
 abs_top_srcdir = @abs_top_srcdir@
 
+# Helper values for calling builtin functions
+empty :=
+space := $(empty) $(empty)
+comma := ,
+
 # Use bash in order to be able to use pipefail
 SHELL=/bin/bash
 
@@ -150,15 +155,17 @@ CLEANFILES = \
 BUILT_SOURCES = \
 	ganeti \
 	stamp-srclinks \
-	lib/_autoconf.py \
-	lib/_vcsversion.py \
 	$(all_dirfiles) \
-	$(PYTHON_BOOTSTRAP)
+	$(PYTHON_BOOTSTRAP) \
+	$(BUILT_PYTHON_SOURCES)
 
-nodist_pkgpython_PYTHON = \
+BUILT_PYTHON_SOURCES = \
 	lib/_autoconf.py \
 	lib/_vcsversion.py
 
+nodist_pkgpython_PYTHON = \
+	$(BUILT_PYTHON_SOURCES)
+
 noinst_PYTHON = \
 	lib/build/__init__.py \
 	lib/build/sphinx_ext.py
@@ -365,7 +372,7 @@ $(RUN_IN_TEMPDIR): | $(all_dirfiles)
 # it changes
 doc/html/index.html: $(docrst) $(docpng) doc/conf.py configure.ac \
 	$(RUN_IN_TEMPDIR) lib/build/sphinx_ext.py lib/opcodes.py lib/ht.py \
-	| lib/_autoconf.py lib/_vcsversion.py
+	| $(BUILT_PYTHON_SOURCES)
 	@test -n "$(SPHINX)" || \
 	    { echo 'sphinx-build' not found during configure; exit 1; }
 	@mkdir_p@ $(dir $@)
@@ -1063,9 +1070,23 @@ hs-check: htools/test
 	@rm -f test.tix
 	./htools/test
 
+# E111: indentation is not a multiple of four
+# E261: at least two spaces before inline comment
+# E501: line too long (80 characters)
+PEP8_IGNORE = E111,E261,E501
+
+# For exclusions (--exclude), pep8 expects filenames only, not whole paths
+PEP8_EXCLUDE = $(subst $(space),$(comma),$(strip $(notdir $(BUILT_PYTHON_SOURCES))))
+
 .PHONY: lint
 lint: $(BUILT_SOURCES)
 	@test -n "$(PYLINT)" || { echo 'pylint' not found during configure; exit 1; }
+	if test -z "$(PEP8)"; then \
+		echo '"pep8" not found during configure' >&2; \
+	else \
+		$(PEP8) --repeat --ignore='$(PEP8_IGNORE)' --exclude='$(PEP8_EXCLUDE)' \
+			$(lint_python_code); \
+	fi
 	$(PYLINT) $(LINT_OPTS) $(lint_python_code)
 	cd $(top_srcdir)/qa && \
 	  PYTHONPATH=$(abs_top_srcdir) $(PYLINT) $(LINT_OPTS) \
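
The PEP8_EXCLUDE expansion above leans on GNU make's $(notdir), $(strip) and
$(subst) built-ins plus the empty/space/comma helpers added at the top of the
file. A rough Python sketch of what that expansion computes (illustrative
only, not part of the build)::

  import os

  # pep8's --exclude matches basenames, not full paths, hence $(notdir ...).
  built_python_sources = ["lib/_autoconf.py", "lib/_vcsversion.py"]
  pep8_exclude = ",".join(os.path.basename(p) for p in built_python_sources)
  assert pep8_exclude == "_autoconf.py,_vcsversion.py"
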
diff --git a/autotools/check-python-code b/autotools/check-python-code
index ec1d96bc23cdcb741392d51faaa65256452c3765..166e12db184eea50a574afb6ccbc0601b14ce182 100755
--- a/autotools/check-python-code
+++ b/autotools/check-python-code
@@ -20,6 +20,13 @@
 
 set -e
 
+readonly maxlinelen=$(for ((i=0; i<81; ++i)); do echo -n .; done)
+
+if [[ "${#maxlinelen}" != 81 ]]; then
+  echo "Internal error: Check for line length is incorrect" >&2
+  exit 1
+fi
+
 # "[...] If the last ARG evaluates to 0, let returns 1; 0 is returned
 # otherwise.", hence ignoring the return value.
 let problems=0 || :
@@ -47,7 +54,7 @@ for script; do
     echo "Found editor-specific settings in $script" >&2
   fi
 
-  if [[ "$(wc --max-line-length < "$script")" -gt 80 ]]; then
+  if grep -n -H "^$maxlinelen" "$script"; then
     let ++problems
     echo "Longest line in $script is longer than 80 characters" >&2
   fi
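
The rewritten check builds $maxlinelen as a string of 81 dots; each dot in the
grep pattern matches any character, so "^$maxlinelen" matches (and, thanks to
-n -H, reports by file and line number) every line longer than 80 characters.
A minimal Python sketch of the same idea, using a hypothetical file name::

  import re

  overlong = re.compile("^" + "." * 81)  # like $maxlinelen: 81 wildcard dots
  with open("somescript.py") as script:  # hypothetical input file
    for lineno, line in enumerate(script, 1):
      if overlong.match(line.rstrip("\n")):
        print("somescript.py:%d: line longer than 80 characters" % lineno)
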
diff --git a/configure.ac b/configure.ac
index 25491a1651201551e73ca1aa5a40e8a57a1a47bc..764a59093e2f17afe52dc6af86cba9161a5c3677 100644
--- a/configure.ac
+++ b/configure.ac
@@ -317,6 +317,14 @@ then
   AC_MSG_WARN([pylint not found, checking code will not be possible])
 fi
 
+# Check for pep8
+AC_ARG_VAR(PEP8, [pep8 path])
+AC_PATH_PROG(PEP8, [pep8], [])
+if test -z "$PEP8"
+then
+  AC_MSG_WARN([pep8 not found, checking code will not be complete])
+fi
+
 # Check for socat
 AC_ARG_VAR(SOCAT, [socat path])
 AC_PATH_PROG(SOCAT, [socat], [])
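
Since the new check declares PEP8 with AC_ARG_VAR, a specific binary can also
be passed to configure instead of relying on $PATH lookup (the path below is
hypothetical)::

  ./configure PEP8=/usr/local/bin/pep8
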
diff --git a/doc/devnotes.rst b/doc/devnotes.rst
index 35d223c62d9f98172e30b064420f6f554c07569c..f9646fbab426bb8bc90f3c8b662ba8c3d4398644 100644
--- a/doc/devnotes.rst
+++ b/doc/devnotes.rst
@@ -16,6 +16,7 @@ Most dependencies from :doc:`install-quick`, plus (for Python):
 - the `en_US.UTF-8` locale must be enabled on the system
 - `pylint <http://www.logilab.org/857>`_ and its associated
   dependencies
+- `pep8 <https://github.com/jcrocholl/pep8/>`_
 
 Note that for pylint, at the current moment the following versions
 need to be used::
diff --git a/lib/client/gnt_group.py b/lib/client/gnt_group.py
index 538c1cbe9fa4cf6ba6fd77844542d86e769bb459..2fb73e1b07b62e0194dc6e9da08154f7159bf0b5 100644
--- a/lib/client/gnt_group.py
+++ b/lib/client/gnt_group.py
@@ -130,17 +130,13 @@ def SetGroupParams(opts, args):
   @return: the desired exit code
 
   """
-  all_changes = {
-    "ndparams": opts.ndparams,
-    "alloc_policy": opts.alloc_policy,
-  }
-
-  if all_changes.values().count(None) == len(all_changes):
+  if opts.ndparams is None and opts.alloc_policy is None:
     ToStderr("Please give at least one of the parameters.")
     return 1
 
-  op = opcodes.OpGroupSetParams(group_name=args[0], # pylint: disable-msg=W0142
-                                **all_changes)
+  op = opcodes.OpGroupSetParams(group_name=args[0],
+                                ndparams=opts.ndparams,
+                                alloc_policy=opts.alloc_policy)
   result = SubmitOrSend(op, opts)
 
   if result:
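
The rewrite above mostly exists to drop the W0142 ("Used * or ** magic")
pylint override: instead of collecting the options in a dict and expanding it
with **, the (possibly None) values are passed as explicit keyword arguments,
assuming the opcode treats None as "leave unchanged". A minimal sketch with a
hypothetical opcode-like class::

  class OpSetParams(object):  # hypothetical stand-in, not a Ganeti opcode
    def __init__(self, group_name, ndparams=None, alloc_policy=None):
      self.group_name = group_name
      self.ndparams = ndparams          # None means "do not change"
      self.alloc_policy = alloc_policy

  # Old pattern (needed "# pylint: disable-msg=W0142"):
  #   changes = {"ndparams": None, "alloc_policy": None}
  #   op = OpSetParams(group_name="grp1", **changes)
  # New pattern, explicit keyword arguments, no pylint warning:
  op = OpSetParams(group_name="grp1", ndparams=None, alloc_policy=None)
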
diff --git a/lib/cmdlib.py b/lib/cmdlib.py
index a57a0d9b02e90da8425dc39a5ecf6cebfd0c7cac..d64e6c7f889af29933ac8996af11f0f5066450ee 100644
--- a/lib/cmdlib.py
+++ b/lib/cmdlib.py
@@ -9828,6 +9828,8 @@ class TLReplaceDisks(Tasklet):
     """
     steps_total = 6
 
+    pnode = self.instance.primary_node
+
     # Step: check device activation
     self.lu.LogStep(1, steps_total, "Check device existence")
     self._CheckDisksExistence([self.instance.primary_node])
@@ -9902,10 +9904,8 @@ class TLReplaceDisks(Tasklet):
                                  " soon as possible"))
 
     self.lu.LogInfo("Detaching primary drbds from the network (=> standalone)")
-    result = self.rpc.call_drbd_disconnect_net([self.instance.primary_node],
-                                               self.node_secondary_ip,
-                                               self.instance.disks)\
-                                              [self.instance.primary_node]
+    result = self.rpc.call_drbd_disconnect_net([pnode], self.node_secondary_ip,
+                                               self.instance.disks)[pnode]
 
     msg = result.fail_msg
     if msg:
diff --git a/tools/cluster-merge b/tools/cluster-merge
index 7d7091f7749c311f9e658f60b8938583ad8e6b84..e2fc8485b302510ed094972d880e19284f623e36 100755
--- a/tools/cluster-merge
+++ b/tools/cluster-merge
@@ -40,6 +40,7 @@ from ganeti import constants
 from ganeti import errors
 from ganeti import ssh
 from ganeti import utils
+from ganeti import netutils
 
 
 _GROUPS_MERGE = "merge"
@@ -109,13 +110,16 @@ class MergerData(object):
   """Container class to hold data used for merger.
 
   """
-  def __init__(self, cluster, key_path, nodes, instances, config_path=None):
+  def __init__(self, cluster, key_path, nodes, instances, master_node,
+               master_ip, config_path=None):
     """Initialize the container.
 
     @param cluster: The name of the cluster
     @param key_path: Path to the ssh private key used for authentication
     @param nodes: List of online nodes in the merging cluster
     @param instances: List of instances running on merging cluster
+    @param master_node: Name of the master node
+    @param master_ip: Cluster IP address
     @param config_path: Path to the merging cluster config
 
     """
@@ -123,6 +127,8 @@ class MergerData(object):
     self.key_path = key_path
     self.nodes = nodes
     self.instances = instances
+    self.master_node = master_node
+    self.master_ip = master_ip
     self.config_path = config_path
 
 
@@ -206,7 +212,26 @@ class Merger(object):
                                  (cluster, result.fail_reason, result.output))
       instances = result.stdout.splitlines()
 
-      self.merger_data.append(MergerData(cluster, key_path, nodes, instances))
+      path = utils.PathJoin(constants.DATA_DIR, "ssconf_%s" %
+                            constants.SS_MASTER_NODE)
+      result = self._RunCmd(cluster, "cat %s" % path, private_key=key_path)
+      if result.failed:
+        raise errors.RemoteError("Unable to retrieve the master node name from"
+                                 " %s. Fail reason: %s; output: %s" %
+                                 (cluster, result.fail_reason, result.output))
+      master_node = result.stdout.strip()
+
+      path = utils.PathJoin(constants.DATA_DIR, "ssconf_%s" %
+                            constants.SS_MASTER_IP)
+      result = self._RunCmd(cluster, "cat %s" % path, private_key=key_path)
+      if result.failed:
+        raise errors.RemoteError("Unable to retrieve the master IP from"
+                                 " %s. Fail reason: %s; output: %s" %
+                                 (cluster, result.fail_reason, result.output))
+      master_ip = result.stdout.strip()
+
+      self.merger_data.append(MergerData(cluster, key_path, nodes, instances,
+                                         master_node, master_ip))
 
   def _PrepareAuthorizedKeys(self):
     """Prepare the authorized_keys on every merging node.
@@ -289,6 +314,31 @@ class Merger(object):
                                  " Fail reason: %s; output: %s" %
                                  (cluster, result.fail_reason, result.output))
 
+  def _RemoveMasterIps(self):
+    """Removes the master IPs from the master nodes of each cluster.
+
+    """
+    for data in self.merger_data:
+      master_ip_family = netutils.IPAddress.GetAddressFamily(data.master_ip)
+      master_ip_len = netutils.IP4Address.iplen
+      if master_ip_family == netutils.IP6Address.family:
+        master_ip_len = netutils.IP6Address.iplen
+      # Not using constants.IP_COMMAND_PATH because the command might run on a
+      # machine where the ip binary lives at a different path, so it's better
+      # to rely on $PATH.
+      cmd = "ip address del %s/%s dev $(cat %s)" % (
+             data.master_ip,
+             master_ip_len,
+             utils.PathJoin(constants.DATA_DIR, "ssconf_%s" %
+                            constants.SS_MASTER_NETDEV))
+      result = self._RunCmd(data.master_node, cmd, max_attempts=3)
+      if result.failed:
+        raise errors.RemoteError("Unable to remove master IP on %s."
+                                 " Fail reason: %s; output: %s" %
+                                 (data.master_node,
+                                  result.fail_reason,
+                                  result.output))
+
   def _StopDaemons(self):
     """Stop all daemons on merging nodes.
 
@@ -693,6 +743,8 @@ class Merger(object):
       self._StopDaemons()
       logging.info("Merging config")
       self._FetchRemoteConfig()
+      logging.info("Removing master IPs on mergee master nodes")
+      self._RemoveMasterIps()
 
       logging.info("Stopping master daemon")
       self._KillMasterDaemon()
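
For illustration, the command assembled by _RemoveMasterIps() for an IPv4
mergee would come out roughly as below; the prefix length is /32 for IPv4 and
/128 for IPv6, and the network device is read from the mergee's ssconf file
rather than hard-coded. The values and the data directory path are
hypothetical (the default /var/lib/ganeti is assumed)::

  # Sketch only; the real code gets master_ip from the mergee's ssconf data
  # and the address family via netutils.IPAddress.GetAddressFamily().
  master_ip = "192.0.2.10"
  iplen = 128 if ":" in master_ip else 32
  cmd = "ip address del %s/%s dev $(cat %s)" % (
    master_ip, iplen, "/var/lib/ganeti/ssconf_master_netdev")
  # e.g. "ip address del 192.0.2.10/32 dev $(cat .../ssconf_master_netdev)"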