Commit f85de79e authored by Antony Chazapis's avatar Antony Chazapis

Start of modular backend (not working yet).

parent e927fb20
......@@ -34,6 +34,7 @@
from django.conf import settings
from simple import SimpleBackend
from modular import ModularBackend
backend = None
options = getattr(settings, 'BACKEND', None)
......
......@@ -102,6 +102,14 @@ class BaseBackend(object):
"""
return
def put_account(self, user, account):
    """Create a new account with the given name.

    Raises:
        NotAllowedError: Operation not permitted
    """
    # Abstract stub: concrete backends provide the real implementation.
    return
def delete_account(self, user, account):
"""Delete the account with the given name.
......
# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.
class DBWorker(object):
    """Database connection handler.

    Wraps a DB-API connection/cursor pair (passed as the 'connection'
    and 'cursor' keyword parameters) and exposes the cursor's execute
    and fetch methods directly as attributes for convenience.
    """

    def __init__(self, **params):
        self.params = params
        self.conn = params['connection']
        self.cur = params['cursor']
        # Bind the cursor methods once so callers can use them directly.
        self.execute = self.cur.execute
        self.executemany = self.cur.executemany
        self.fetchone = self.cur.fetchone
        self.fetchall = self.cur.fetchall
# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.
from dbworker import DBWorker
class Groups(DBWorker):
    """Groups are named collections of members, belonging to an owner.

    Stored as (owner, name, member) rows; the triple is the primary key,
    so duplicate memberships are impossible.
    """

    def __init__(self, **params):
        DBWorker.__init__(self, **params)
        execute = self.execute
        execute(""" create table if not exists groups
( owner text,
name text,
member text,
primary key (owner, name, member) ) """)
        # Secondary index for the reverse lookup done by group_parents().
        execute(""" create index if not exists idx_groups_member
on groups(member) """)

    def group_names(self, owner):
        """List all group names belonging to owner."""
        q = "select distinct name from groups where owner = ?"
        self.execute(q, (owner,))
        return [r[0] for r in self.fetchall()]

    def group_list(self, owner):
        """List all (group, member) tuples belonging to owner."""
        q = "select name, member from groups where owner = ?"
        self.execute(q, (owner,))
        return self.fetchall()

    def group_add(self, owner, group, member):
        """Add a member to a group (no-op if already present)."""
        q = "insert or ignore into groups (owner, name, member) values (?, ?, ?)"
        self.execute(q, (owner, group, member))

    def group_addmany(self, owner, group, members):
        """Add members to a group (existing memberships are ignored)."""
        q = "insert or ignore into groups (owner, name, member) values (?, ?, ?)"
        self.executemany(q, ((owner, group, member) for member in members))

    def group_remove(self, owner, group, member):
        """Remove a member from a group."""
        q = "delete from groups where owner = ? and name = ? and member = ?"
        self.execute(q, (owner, group, member))

    def group_delete(self, owner, group):
        """Delete a group."""
        q = "delete from groups where owner = ? and name = ?"
        self.execute(q, (owner, group))

    def group_destroy(self, owner):
        """Delete all groups belonging to owner."""
        q = "delete from groups where owner = ?"
        self.execute(q, (owner,))

    def group_members(self, owner, group):
        """Return the list of members of a group."""
        q = "select member from groups where owner = ? and name = ?"
        self.execute(q, (owner, group))
        return [r[0] for r in self.fetchall()]

    def group_check(self, owner, group, member):
        """Check if a member is in a group."""
        q = "select 1 from groups where owner = ? and name = ? and member = ?"
        # Bug fix: the query has three placeholders, but 'owner' was
        # missing from the parameter tuple, causing a binding error.
        self.execute(q, (owner, group, member))
        return bool(self.fetchone())

    def group_parents(self, member):
        """Return all (owner, group) tuples that contain member."""
        q = "select owner, name from groups where member = ?"
        self.execute(q, (member,))
        return self.fetchall()
# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.
from blocker import Blocker
from mapper import Mapper
# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.
from os import makedirs
from os.path import isdir, realpath, exists, join
from hashlib import new as newhasher
from binascii import hexlify
from pithos.lib.hashfiler.context_file import ContextFile, file_sync_read_chunks
class Blocker(object):
    """File-based block store: each block lives in its own file under
    blockpath, named by the hex digest of the block's hash.

    Required constructor parameters: blocksize, blockpath, hashtype.
    """

    blocksize = None
    blockpath = None
    hashtype = None

    def __init__(self, **params):
        """Initialize the store, creating blockpath if it does not exist.

        Raises:
            ValueError: blockpath exists but is not a directory, or
                hashtype is not provided by hashlib.
        """
        blocksize = params['blocksize']
        blockpath = params['blockpath']
        blockpath = realpath(blockpath)
        if not isdir(blockpath):
            if not exists(blockpath):
                makedirs(blockpath)
            else:
                raise ValueError("Variable blockpath '%s' is not a directory" % (blockpath,))

        hashtype = params['hashtype']
        try:
            hasher = newhasher(hashtype)
        except ValueError:
            msg = "Variable hashtype '%s' is not available from hashlib"
            raise ValueError(msg % (hashtype,))

        # Digest of the empty string; only its length is needed.
        hasher.update("")
        emptyhash = hasher.digest()

        self.blocksize = blocksize
        self.blockpath = blockpath
        self.hashtype = hashtype
        self.hashlen = len(emptyhash)
        self.emptyhash = emptyhash

    def get_rear_block(self, blkhash, create=0):
        """Return a ContextFile over the block file named after blkhash."""
        name = join(self.blockpath, hexlify(blkhash))
        return ContextFile(name, create)

    def check_rear_block(self, blkhash):
        """Return True if a block file exists for blkhash."""
        name = join(self.blockpath, hexlify(blkhash))
        return exists(name)

    def block_hash(self, data):
        """Hash a block of data.

        Trailing NUL bytes are stripped before hashing, so zero-padded
        variants of the same block hash identically.
        """
        hasher = newhasher(self.hashtype)
        hasher.update(data.rstrip('\x00'))
        return hasher.digest()

    def block_ping(self, hashes):
        """Check hashes for existence and
        return the indices of those missing from block storage.
        """
        missing = []
        append = missing.append
        for i, h in enumerate(hashes):
            if not self.check_rear_block(h):
                append(i)
        return missing

    def block_retr(self, hashes):
        """Retrieve blocks from storage by their hashes.

        Stops at the first hash whose block cannot be read; the returned
        list may therefore be shorter than 'hashes'.
        """
        blocksize = self.blocksize
        blocks = []
        append = blocks.append
        block = None
        for h in hashes:
            with self.get_rear_block(h, 0) as rbl:
                # NOTE(review): ContextFile defines no __bool__/__nonzero__,
                # so 'not rbl' is always False here — confirm intent.
                if not rbl:
                    break
                for block in rbl.sync_read_chunks(blocksize, 1, 0):
                    break  # there should be just one block there
            if not block:
                break
            append(block)
        return blocks

    def block_stor(self, blocklist):
        """Store a bunch of blocks and return (hashes, missing).
        Hashes is a list of the hashes of the blocks,
        missing is a list of indices in that list indicating
        which blocks were missing from the store.
        """
        block_hash = self.block_hash
        hashlist = [block_hash(b) for b in blocklist]
        # (removed unused local 'mf')
        missing = self.block_ping(hashlist)
        for i in missing:
            with self.get_rear_block(hashlist[i], 1) as rbl:
                rbl.sync_write(blocklist[i])  # XXX: verify?
        return hashlist, missing

    def block_delta(self, blkhash, offdata=()):
        """Construct and store a new block from a given block
        and a list of (offset, data) 'patches'. Return:
        (the hash of the new block, if the block already existed)
        """
        if not offdata:
            return None, None

        blocksize = self.blocksize
        block = self.block_retr((blkhash,))
        if not block:
            return None, None

        block = block[0]
        newblock = ''
        idx = 0
        size = 0
        trunc = 0
        for off, data in offdata:
            if not data:
                # An empty patch truncates the block at this point.
                trunc = 1
                break
            newblock += block[idx:off] + data
            size += off - idx + len(data)
            if size >= blocksize:
                break
            # NOTE(review): 'idx' is never advanced and this rebinds the
            # loop variable, which is overwritten on the next iteration —
            # multi-patch behavior looks suspect; confirm against callers.
            off = size

        if not trunc:
            # Keep the unpatched tail of the original block.
            newblock += block[size:len(block)]

        h, a = self.block_stor((newblock,))
        return h[0], 1 if a else 0

    def block_hash_file(self, openfile):
        """Return the list of hashes (hashes map)
        for the blocks in a buffered file.
        Helper method, does not affect store.
        """
        hashes = []
        append = hashes.append
        block_hash = self.block_hash
        for block in file_sync_read_chunks(openfile, self.blocksize, 1, 0):
            append(block_hash(block))
        return hashes

    def block_stor_file(self, openfile):
        """Read blocks from buffered file object and store them. Return:
        (bytes read, list of hashes, list of hashes that were missing)
        """
        blocksize = self.blocksize
        block_stor = self.block_stor
        hashlist = []
        hextend = hashlist.extend
        storedlist = []
        sextend = storedlist.extend
        lastsize = 0
        for block in file_sync_read_chunks(openfile, blocksize, 1, 0):
            hl, sl = block_stor((block,))
            hextend(hl)
            sextend(sl)
            lastsize = len(block)
        # All blocks but the last occupy a full blocksize slot.
        size = (len(hashlist) - 1) * blocksize + lastsize if hashlist else 0
        return size, hashlist, storedlist
# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.
from os import SEEK_CUR, SEEK_SET, fsync
from errno import ENOENT
# Lazily-grown cache of NUL characters, managed by zeros().
_zeros = ''


def zeros(nr):
    """Return a string of nr NUL characters (backed by a shared cache)."""
    global _zeros
    size = len(_zeros)
    if nr == size:
        return _zeros
    if nr > size:
        _zeros += '\0' * (nr - size)
        return _zeros
    if nr < size:
        _zeros = _zeros[:nr]
        return _zeros


def file_sync_write_chunks(openfile, chunksize, offset, chunks, size=None):
    """Write given chunks to the given buffered file object.
    Writes never span across chunk boundaries: each short chunk is padded
    with NULs up to the next chunk boundary before the following chunk
    is written. Writing starts at chunk number 'offset'.
    If size is given, stop after (truncating the final chunk) or pad
    until size bytes have been written.
    """
    fwrite = openfile.write
    seek = openfile.seek
    padding = 0

    try:
        seek(offset * chunksize)
    except IOError:
        # Unseekable stream: reach the offset by writing zeros instead.
        seek = None
        for _ in range(offset):
            fwrite(zeros(chunksize))

    cursize = offset * chunksize  # current position in the file
    for chunk in chunks:
        if padding:
            if seek:
                # Skip over the gap and write a single NUL at its end;
                # the intervening bytes read back as zeros.
                seek(padding - 1, SEEK_CUR)
                fwrite("\x00")
            else:
                fwrite(zeros(padding))
            cursize += padding
            padding = 0
        if size is not None and cursize + chunksize >= size:
            # Final chunk slot: truncate so exactly 'size' bytes total.
            # (Bug fix: was 'chunksize - (cursize - size)', which exceeds
            # chunksize and never truncated.)
            chunk = chunk[:size - cursize]
            fwrite(chunk)
            cursize += len(chunk)
            break
        fwrite(chunk)
        # Bug fix: cursize was never advanced on this path, so the size
        # cutoff and final padding were computed from a stale position.
        cursize += len(chunk)
        padding = chunksize - len(chunk)

    padding = size - cursize if size is not None else 0
    if padding <= 0:
        return
    q, r = divmod(padding, chunksize)
    for _ in range(q):
        fwrite(zeros(chunksize))
    if r:
        fwrite(zeros(r))
def file_sync_read_chunks(openfile, chunksize, nr, offset=0):
    """Read and yield up to nr chunks from a buffered file object,
    starting at chunk number 'offset'.
    Reads never span across chunksize boundaries; a final short chunk
    is yielded as-is.
    """
    fread = openfile.read
    remains = offset * chunksize
    seek = openfile.seek
    try:
        seek(remains)
    except IOError:
        # Unseekable stream: reach the offset by reading and discarding.
        # (Bug fix: this skip loop previously ran even after a successful
        # seek, discarding the offset twice; it also spun forever when
        # EOF arrived before the offset, since fread('') left 'remains'
        # unchanged.)
        while remains > 0:
            s = fread(remains)
            if not s:
                return  # EOF before reaching the requested offset
            remains -= len(s)

    while nr:
        remains = chunksize
        chunk = ''
        while 1:
            s = fread(remains)
            if not s:
                # EOF: emit any partial chunk collected so far.
                if chunk:
                    yield chunk
                return
            chunk += s
            remains -= len(s)
            if remains <= 0:
                break
        yield chunk
        nr -= 1
class ContextFile(object):
    """Context-manager wrapper around a disk file.

    The file is opened on __enter__ (created first if 'create' is set
    and the file does not exist) and closed on __exit__.
    """

    __slots__ = ("name", "fdesc", "create")

    def __init__(self, name, create=0):
        self.name = name      # path of the underlying file
        self.fdesc = None     # open file object, valid inside the context
        self.create = create  # create on ENOENT instead of raising
        #self.dirty = 0

    def __enter__(self):
        name = self.name
        try:
            fdesc = open(name, 'rb+')
        except IOError as e:
            # Modernized from 'except IOError, e' (Python 2-only syntax;
            # 'as' works on Python 2.6+ and 3).
            if not self.create or e.errno != ENOENT:
                raise
            # NOTE(review): newly created files are opened in text mode
            # ('w+') while existing ones are opened binary ('rb+') --
            # harmless on Python 2, but confirm this is intentional.
            fdesc = open(name, 'w+')
        self.fdesc = fdesc
        return self

    def __exit__(self, exc, arg, trace):
        fdesc = self.fdesc
        if fdesc is not None:
            #if self.dirty:
            #    fsync(fdesc.fileno())
            fdesc.close()
        return False  # propagate exceptions

    def seek(self, offset, whence=SEEK_SET):
        return self.fdesc.seek(offset, whence)

    def tell(self):
        return self.fdesc.tell()

    def truncate(self, size):
        self.fdesc.truncate(size)

    def sync_write(self, data):
        """Write data at the current position."""
        #self.dirty = 1
        self.fdesc.write(data)

    def sync_write_chunks(self, chunksize, offset, chunks, size=None):
        """Write chunks at chunk boundaries; see file_sync_write_chunks."""
        #self.dirty = 1
        return file_sync_write_chunks(self.fdesc, chunksize, offset, chunks, size)

    def sync_read(self, size):
        """Read from the current position to EOF, in size-byte steps."""
        read = self.fdesc.read
        data = ''
        while 1:
            s = read(size)
            if not s:
                break
            data += s
        return data

    def sync_read_chunks(self, chunksize, nr, offset=0):
        """Yield up to nr chunks; see file_sync_read_chunks."""
        return file_sync_read_chunks(self.fdesc, chunksize, nr, offset)
# Copyright 2011 GRNET S.A. All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.
from os.path import realpath, join, exists, isdir
from os import makedirs, unlink
from errno import ENOENT
from pithos.lib.hashfiler.context_file import ContextFile
class Mapper(object):
"""Mapper.
Required contstructor parameters: mappath, namelen.
"""
mappath = None
namelen = None
def __init__(self, **params):
self.params = params
self.namelen = params['namelen']
mappath = realpath(params['mappath'])
if not isdir(mappath):
if not exists(mappath):
makedirs(