Commit 15ce514d authored by Dimitris Aragiorgis's avatar Dimitris Aragiorgis Committed by Christos Stavrakakis

Add snf-deploy tree

Signed-off-by: Dimitris Aragiorgis <dimara@grnet.gr>
parent 254d3991
*.pyc
lala.tmp
kamaki-fab
images/*
*.pid
*.monitor
packages
*.swp
include README Changelog
include distribute_setup.py
#!/bin/bash
# Print the help text (plus usage hint) and terminate with a non-zero status.
usage(){
    cat <<EOF

Usage: $0: [options]
-h, --help Prints this help message
--debian [branch] Local debian branch to use (default debian)
--upstream [branch] Local upstream branch to use (default master)
--remote [repo] Remote repo to use (default origin)
--packages [dir] Where to store the created packages (default ~/packages)
--validate Fetch remote repo branches and
check if local are up-to-date (default false)
--push Whether to push upstream (default false)

EOF
    exit 1
}
# Print the name of the currently checked-out git branch
# (empty output when not inside a repository).
parse_git_branch()
{
    # Only the current branch line starts with "* "; print it sans prefix
    git branch 2> /dev/null | sed -n 's/^\* //p'
}
# die MESSAGE...
# Print MESSAGE to stderr (with `echo -e`, so "\n" sequences expand)
# and abort the script with status 1.
die()
{
    # Quote "$*" so whitespace inside the message is preserved intact
    echo -e "$*" 1>&2
    # Diagnostic output belongs on stderr, same as the message above
    echo Aborting. 1>&2
    exit 1
}
# Run every command registered with add_cleanup, newest first (LIFO),
# then clear the EXIT trap so we do not recurse.
# NOTE: add_cleanup stores each command %q-quoted in a single string,
# so the string must be re-parsed with `eval` -- plain word splitting
# would leave the backslash escapes in place and corrupt arguments
# containing spaces or shell metacharacters.
cleanup()
{
    trap - EXIT
    echo -n Cleaning up...
    local i
    for ((i = ${#CLEANUP[@]} - 1; i >= 0; i--)); do
        eval "${CLEANUP[$i]}"
    done
    echo "done"
}
# add_cleanup CMD [ARG...]
# Register a command to be executed by cleanup() on exit.  Each argument
# is %q-quoted so the whole command survives being stored as one string.
add_cleanup() {
    local quoted="" arg
    for arg in "$@"; do
        quoted+=$(printf "%q " "$arg")
    done
    CLEANUP+=("$quoted")
}
# Record the current HEAD commit in LASTCHECKPOINT and register a
# cleanup action that hard-resets back to it, undoing later changes.
add_checkpoint()
{
    local commit
    # rev-parse HEAD is the direct way to get the current commit;
    # scraping `git reflog` output is fragile and locale-dependent
    commit=$(git rev-parse HEAD)
    add_cleanup git reset --hard $commit
    LASTCHECKPOINT=$commit
}
# Stack of cleanup commands, executed in reverse order by cleanup() on EXIT
CLEANUP=( )
# Parse the command line with getopt(1); see usage() for the option list
TEMP=$(getopt -o h --long help,validate,push,packages:,upstream:,debian:,remote: -n 'autopkg.sh' -- "$@")
if [ $? != 0 ] ; then echo "Terminating..." >&2 ; exit 1 ; fi
# Replace the positional parameters with getopt's canonicalized output
eval set -- "$TEMP"
while true ; do
case "$1" in
-h|--help) usage ;;
--upstream) LOCALUPSTREAM=$2 ; shift 2 ;;
--debian) LOCALDEBIAN=$2 ; shift 2 ;;
--remote) REMOTE=$2 ; shift 2 ;;
--packages) PKGAREA=$2 ; shift 2 ;;
--validate) VALIDATE=true ; shift ;;
--push) PUSH=true ; shift ;;
--) shift ; break ;;
*) echo "Internal error!" ; usage ;;
esac
done
# The root of the git repository, no matter where we're called from
TOPLEVEL="$(git rev-parse --show-toplevel)"
# Fill in defaults for anything not given on the command line;
# the upstream branch defaults to whatever is currently checked out
: ${LOCALUPSTREAM:=$(parse_git_branch)}
: ${LOCALDEBIAN:=debian}
: ${REMOTE:=origin}
: ${VALIDATE:=false}
: ${PUSH:=false}
: ${PKGAREA:=~/packages}
: ${BACKUPAREA:=~/backup}
cd "$TOPLEVEL"
# Prerequisites: Test all important directories exist
test -d "$PKGAREA" || die "Package area directory $PKGAREA missing"
test -d "$BACKUPAREA" || die "Backup area directory $BACKUPAREA missing"
# Prerequisite: Test the dialog utility is available
dialog --help &>/dev/null || die "Could not run the 'dialog' utility"
# Scratch build area; removed again by the cleanup trap on exit
BUILDAREA=$(mktemp -d --tmpdir=/tmp build-area.XXX)
add_cleanup rm -r $BUILDAREA
echo "############################################################################"
echo "Will build packages under $BUILDAREA"
echo "Local upstream branch: $LOCALUPSTREAM"
echo "Local debian branch: $LOCALDEBIAN"
$VALIDATE && echo "Will fetch $REMOTE and check if $LOCALUPSTREAM and $LOCALDEBIAN are up-to-date"
echo "############################################################################"
echo "Press Enter to continue..."
read
# Refuse to run on a dirty working tree (untracked files are tolerated)
MODIFIED=$(git status --short | grep -v "??")
test -z "$MODIFIED" || die "error: Repository is dirty. Commit your local changes:\n $MODIFIED"
# From here on any failure triggers cleanup() via the EXIT trap
set -e
trap cleanup EXIT
add_checkpoint
# Create a temporary debian branch to do everything
TMPDEBIAN=$(mktemp -u debian.XXX)
git branch --track $TMPDEBIAN $LOCALDEBIAN
add_cleanup git branch -D $TMPDEBIAN
git checkout $TMPDEBIAN
add_cleanup git checkout $LOCALUPSTREAM
# Whether we are in snapshot or release mode
snap=false
mrgextra=-m
dchextra=-R
mrgmsg="Merge branch '$LOCALUPSTREAM' into $LOCALDEBIAN"
# Snapshot mode switches git-dch to -S and drops the custom merge message
dialog --yesno "Create Snapshot?" 5 20 && snap=true && dchextra=-S && mrgextra= && mrgmsg=
# merge local branch into tmp branch with a nice commit message,
# so it can be pushed as is to upstream debian
export GIT_MERGE_AUTOEDIT=no
git merge $mrgextra ${mrgextra:+"$mrgmsg"} $LOCALUPSTREAM
# auto edit Debian changelog depending on Snapshot or Release mode
export EDITOR=/usr/bin/vim
git-dch --debian-branch=$TMPDEBIAN --git-author --ignore-regex=".*" --multimaint-merge --since=HEAD $dchextra
git add debian/changelog
# get version from the changelog
# we add a git tag here, so setup.py sdist works as expected
# FIXME: This is a workaround for the way Synnefo packages determine
# the versions for their Python packages
# The version is the text inside the first parentheses of the changelog
version=$(IFS="()" ; read x v x < debian/changelog ; echo $v)
if ! $snap; then
# Release mode: commit the changelog and tag (tag is removed on cleanup)
git commit -s -a -m "Bump new upstream version"
TAGFILE=$(mktemp -t tag.XXX)
add_cleanup rm $TAGFILE
dialog --inputbox "New Debian Tag: " 5 30 "debian/$version" 2>$TAGFILE
git tag $(<$TAGFILE)
add_cleanup git tag -d $(<$TAGFILE)
fi
add_cleanup git reset --hard HEAD
# Build all packages
git-buildpackage --git-export-dir="$BUILDAREA" \
--git-upstream-branch=$LOCALUPSTREAM \
--git-debian-branch=$TMPDEBIAN \
--git-export=INDEX \
--git-ignore-new -sa
# do some dirty backup
# pkgarea might be needed by auto-deploy tool
rm -f "$PKGAREA"/* || true
cp -v "$BUILDAREA"/* "$PKGAREA"/ || true
cp -v "$BUILDAREA"/* "$BACKUPAREA"/ || true
# check_remote REMOTE BRANCH
# Fetch REMOTE and die if the local BRANCH is behind REMOTE/BRANCH.
function check_remote(){
    git fetch "$1" 2>/dev/null || die "Could not fetch $1"
    git fetch "$1" "$2" 2>/dev/null || die "Could not fetch $1/$2"
    # --count avoids the whitespace padding some `wc -l` builds emit
    commits_behind=$(git rev-list --count "$2..$1/$2")
    if [ "$commits_behind" -ne 0 ]; then
        die "Your local branch is outdated. Please run:\ngit pull --rebase $1/$2"
    fi
}
# Optionally verify both local branches are up to date with the remote
if $VALIDATE; then
check_remote $REMOTE $LOCALUPSTREAM
check_remote $REMOTE $LOCALDEBIAN
fi
# trap - EXIT
# here we can push the commits to the remote debian branch as they are
echo
echo "#################################################"
echo "## SUCCESS ##"
echo "#################################################"
# Optionally publish the temporary debian branch and the upstream branch
if $PUSH; then
git push --tags $REMOTE $TMPDEBIAN:$LOCALDEBIAN
git push $REMOTE $LOCALUPSTREAM:$LOCALUPSTREAM
fi
exit 0
[packages]
# whether to use apt-get or local generated package found in packages dir
use_local_packages = True
# url to obtain latest synnefo packages.
# To use them change USE_LOCAL_PACKAGES setting to yes
# To get them run: snf-deploy packages
package_url = http://builder.dev.grnet.gr/synnefo/packages/Squeeze/40/
[dirs]
# dir to find all template files used to customize setup
# in case you want to add another setting please modify the corresponding file
templates = /var/lib/snf-deploy/files
# dir to store local images (disk0, disk1 of the virtual cluster)
images = /var/lib/snf-deploy/images
# dir to store/find local packages
# dir to locally save packages that will be downloaded from package_url
# put here any locally created packages (useful for development)
packages = /var/lib/snf-deploy/packages
# dir to store pidfiles (dnsmasq, kvm)
run = /var/run/snf-deploy
# dir to store dnsmasq related files
dns = /var/lib/snf-deploy/dnsmasq
# dir to lookup fabfile and ifup script
lib = /usr/lib/snf-deploy
# dir to store executed commands (to enforce sequential execution)
cmd = /var/run/snf-deploy/cmd
[ganeti1]
cluster_nodes = node1
master_node = node1
cluster_netdev = eth0
cluster_name = ganeti1
cluster_ip = 192.168.0.13
vg = autovg
synnefo_public_network_subnet = 10.0.1.0/24
synnefo_public_network_gateway = 10.0.1.1
synnefo_public_network_type = CUSTOM
image_dir = /srv/okeanos
# To add another cluster repeat the above section
# with different header and nodes
# please note that currently only deployments where all nodes
# (both ganeti and synnefo) reside in the same subnet/domain are supported
[network]
domain = synnefo.live
subnet = 192.168.0.0/28
gateway = 192.168.0.14
[hostnames]
node1 = auto1
# node2 = auto2
[ips]
node1 = 192.168.0.1
# node2 = 192.168.0.2
# This is used only in case of vcluster
# needed to pass the correct dhcp responses to the virtual nodes
[macs]
node1 = 52:54:00:00:00:01
# node2 = 52:54:00:00:00:02
[info]
# Here we define which nodes from the predefined ones to use
nodes = node1
# login credentials for the nodes
# please note that in case of vcluster these are preconfigured
# and not editable.
# in case of physical nodes all nodes should have the same login account
user = root
password = 12345
public_iface = eth0
vm_public_iface = eth1
vm_private_iface = eth2
# extra disk name inside the nodes
# if defined, snf-deploy will create a VG for ganeti in order to support lvm storage
# if not then only file disk template will be supported
extra_disk = /dev/vdb
[debian]
rabbitmq-server = testing
gunicorn = squeeze-backports
qemu-kvm = squeeze-backports
qemu = squeeze-backports
python-gevent = squeeze-backports
apache2 =
postgresql =
python-psycopg2 =
python-argparse =
nfs-kernel-server = squeeze-backports
nfs-common = squeeze-backports
bind9 =
vlan =
vlan =
lvm2 =
curl =
memcached =
python-memcache =
bridge-utils =
python-progress =
ganeti-instance-debootstrap =
[synnefo]
snf-astakos-app = stable
snf-common = stable
snf-cyclades-app = stable
snf-cyclades-gtools = stable
snf-django-lib = stable
python-astakosclient = stable
python-objpool = stable
snf-branding = stable
snf-webproject = stable
snf-pithos-app = stable
snf-pithos-backend = stable
snf-tools = stable
python-django-south = stable
[ganeti]
snf-ganeti = 2.6.2+ippool11+hotplug5+extstorage3+rbdfix1+kvmfix2+nolvm+netxen-1~squeeze
ganeti-htools = 2.6.2+ippool11+hotplug5+extstorage3+rbdfix1+kvmfix2+nolvm+netxen-1~squeeze
[other]
snf-cloudcms = stable
snf-vncauthproxy = stable
snf-pithos-webclient = stable
snf-image = stable
snf-network = stable
nfdhcpd = stable
kamaki = stable
python-bitarray = stable
nfqueue-bindings-python = stable
[cred]
synnefo_user = synnefo
synnefo_db_passwd = example_passw0rd
synnefo_rapi_passwd = example_rapi_passw0rd
synnefo_rabbitmq_passwd = example_rabbitmq_passw0rd
user_email = user@synnefo.org
user_name = John
user_lastname = Doe
user_passwd = 12345
[roles]
accounts = node1
compute = node1
object-store = node1
cyclades = node1
pithos = node1
cms = node1
db = node1
mq = node1
ns = node1
client = node1
router = node1
[synnefo]
pithos_dir = /srv/pithos
vm_public_bridge = br0
vm_private_bridge = prv0
common_bridge = br0
debian_base_url = https://pithos.okeanos.grnet.gr/public/RDISy7sNVIJ9KIm4JkmbX4
[image]
# url to get the base image. This is a debian base image with preconfigured
# root password and installed rsa/dsa keys. Plus a NetworkManager hook that
# changes the VM's name based on info provided by dhcp response.
# To create it run: snf-deploy image
squeeze_image_url = https://pithos.okeanos.grnet.gr/public/832xv
ubuntu_image_url =
# in order for ganeti nodes to support lvm storage (plain disk template), an
# extra disk is needed so that a VG can eventually be created. Ganeti requires
# this VG to be at least 30GB. To this end, for the virtual nodes to have
# this extra disk an image should be created locally. There are three options:
# 1. not create an extra disk (only file storage template will be supported)
# 2. create an image of 30G in image dir (default /var/lib/snf-deploy/images)
# using dd if=/dev/zero of=squeeze.disk1
# 3. create this image in a local VG using lvcreate -L30G squeeze.disk1 lvg
# and create a symbolic link in /var/lib/snf-deploy/images
# Whether to create an extra disk or not
create_extra_disk = False
# lvg is the name of the local VG if any
lvg =
# OS installed in the virtual cluster
os = squeeze
[cluster]
# the bridge to use for the virtual cluster
# on this bridge we will launch a dnsmasq and provide
# fqdns needed to the cluster.
# In order for cluster nodes to have internet access, the host must do NAT.
# iptables -t nat -A POSTROUTING -s 192.0.0.0/28 -j MASQUERADE
# ip addr add 192.0.0.14/28 dev auto_nodes_br
# To create run: snf-deploy cluster
bridge = auto_nodes_br
#!python
"""Bootstrap distribute installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from distribute_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import os
import sys
import time
import fnmatch
import tempfile
import tarfile
from distutils import log
# site.USER_SITE exists only on Python >= 2.6; older interpreters get None.
try:
    from site import USER_SITE
except ImportError:
    USER_SITE = None

# _python_cmd(*args) runs "python <args>" and returns True on exit status 0.
try:
    import subprocess

    def _python_cmd(*args):
        """Run sys.executable with *args*; return True iff it exits with 0."""
        args = (sys.executable,) + args
        return subprocess.call(args) == 0
except ImportError:
    # will be used for python 2.3
    def _python_cmd(*args):
        """Fallback via os.spawnl for interpreters without subprocess."""
        args = (sys.executable,) + args
        # quoting arguments if windows
        if sys.platform == 'win32':
            def quote(arg):
                if ' ' in arg:
                    return '"%s"' % arg
                return arg
            args = [quote(arg) for arg in args]
        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
# Distribute version fetched when the caller does not specify one.
DEFAULT_VERSION = "0.6.10"
# Base URL the distribute source tarball is downloaded from.
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
# Version advertised by the faked setuptools metadata (see the PKG-INFO
# template below, used when pretending setuptools is installed).
SETUPTOOLS_FAKED_VERSION = "0.6c11"
# PKG-INFO payload written for the faked setuptools distribution.
SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: %s
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
""" % SETUPTOOLS_FAKED_VERSION
def _install(tarball):
    """Extract the distribute source *tarball* and run ``setup.py install``.

    All work happens inside a fresh temporary directory; the original
    working directory is restored afterwards.  Failures from the install
    subprocess are only logged, not raised.
    NOTE(review): the temporary directory is not removed afterwards --
    confirm whether that is intentional.
    """
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()
        # going in the directory (the single top-level dir of the tarball)
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        # installing
        log.warn('Installing Distribute')
        if not _python_cmd('setup.py', 'install'):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
    finally:
        os.chdir(old_wd)
def _build_egg(egg, tarball, to_dir):
    """Build a distribute egg from *tarball*, placing it in *to_dir*.

    The tarball is extracted into a temporary directory and
    ``setup.py bdist_egg --dist-dir to_dir`` is run there.

    Raises:
        IOError: if the expected *egg* file was not produced.
    """
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()
        # going in the directory (the single top-level dir of the tarball)
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        # building an egg
        log.warn('Building a Distribute egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    finally:
        os.chdir(old_wd)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
    """Make a distribute egg for *version* importable, building it if needed.

    If the egg is not already present in *to_dir*, the source tarball is
    downloaded (after *download_delay* seconds -- see download_setuptools)
    and the egg is built from it.  The egg is then prepended to sys.path
    and recorded as the egg setuptools should bootstrap-install from.
    """
    egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
                       % (version, sys.version_info[0], sys.version_info[1]))
    if not os.path.exists(egg):
        tarball = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, tarball, to_dir)
    sys.path.insert(0, egg)
    import setuptools
    setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15, no_fake=True):
    """Ensure that distribute (>= *version*) is importable.

    Downloads and bootstraps distribute into *to_dir* when it is missing
    or too old.  With ``no_fake=False``, fake setuptools metadata is
    maintained so distribute can shadow a real setuptools installation.
    Exits the process (status 2) if a conflicting distribute version is
    already imported and cannot be replaced.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    was_imported = 'pkg_resources' in sys.modules or \
        'setuptools' in sys.modules
    try:
        try:
            import pkg_resources
            # pkg_resources without the _distribute marker comes from plain
            # setuptools, not distribute -- treat it as not installed
            if not hasattr(pkg_resources, '_distribute'):
                if not no_fake:
                    _fake_setuptools()
                raise ImportError
        except ImportError:
            return _do_download(version, download_base, to_dir, download_delay)
        try:
            pkg_resources.require("distribute>="+version)
            return
        except pkg_resources.VersionConflict:
            e = sys.exc_info()[1]
            if was_imported:
                # Too late to swap modules under the caller's feet; bail out
                sys.stderr.write(
                    "The required version of distribute (>=%s) is not available,\n"
                    "and can't be installed while this script is running. Please\n"
                    "install a more recent version first, using\n"
                    "'easy_install -U distribute'."
                    "\n\n(Currently using %r)\n" % (version, e.args[0]))
                sys.exit(2)
            else:
                # Drop the stale module so the downloaded egg can be used
                del pkg_resources, sys.modules['pkg_resources']  # reload ok
                return _do_download(version, download_base, to_dir,
                                    download_delay)
        except pkg_resources.DistributionNotFound:
            return _do_download(version, download_base, to_dir,
                                download_delay)
    finally:
        if not no_fake:
            _create_fake_setuptools_pkg_info(to_dir)
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15):
    """Fetch the distribute source tarball and return its local path.

    `version` should be a valid distribute version number that is
    available as an egg for download under the `download_base` URL
    (which should end with a '/').  The tarball is stored in `to_dir`;
    an already-present file is reused instead of being downloaded again.
    `delay` is the number of seconds to pause before an actual download
    attempt.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    # urlopen lives in different modules on Python 2 and Python 3
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen
    archive = "distribute-%s.tar.gz" % version
    target = os.path.join(to_dir, archive)
    if not os.path.exists(target):  # Avoid repeated downloads
        response = outfile = None
        try:
            log.warn("Downloading %s", download_base + archive)
            response = urlopen(download_base + archive)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            payload = response.read()
            outfile = open(target, "wb")
            outfile.write(payload)
        finally:
            if response:
                response.close()
            if outfile:
                outfile.close()
    return os.path.realpath(target)
def _no_sandbox(function):
def __no_sandbox(*args, **kw):
try:
from setuptools.sandbox import DirectorySandbox
if not hasattr(DirectorySandbox, '_old'):
def violation(*args):
pass
DirectorySandbox._old = DirectorySandbox._violation
DirectorySandbox._violation = violation
patched = True
else:
patched = False
except ImportError:
patched = False
try:
return function(*args, **kw)
finally:
if patched:
DirectorySandbox._violation = DirectorySandbox._old
del DirectorySandbox._old
return __no_sandbox