# Copyright 2011-2012 GRNET S.A. All rights reserved.
# 
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
# 
#   1. Redistributions of source code must retain the above
#      copyright notice, this list of conditions and the following
#      disclaimer.
# 
#   2. Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
# 
# THIS SOFTWARE IS PROVIDED BY GRNET S.A. ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRNET S.A OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# 
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be
# interpreted as representing official policies, either expressed
# or implied, of GRNET S.A.

from os import makedirs
from os.path import isdir, realpath, exists, join
from hashlib import new as newhasher
from binascii import hexlify

from context_file import ContextFile, file_sync_read_chunks


class Blocker(object):
    """Content-addressable block storage on the local filesystem.

    Each block is stored in a file named after the hex digest of its
    hash, sharded into a three-level directory tree under ``blockpath``.

    Required contstructor parameters: blocksize, blockpath, hashtype.
    """

    blocksize = None
    blockpath = None
    hashtype = None

    def __init__(self, **params):
        """Set up the storage directory and the hash function.

        Raises ValueError if ``blockpath`` exists but is not a
        directory, or if ``hashtype`` is not provided by hashlib.
        """
        blocksize = params['blocksize']
        blockpath = realpath(params['blockpath'])
        if not isdir(blockpath):
            if not exists(blockpath):
                makedirs(blockpath)
            else:
                raise ValueError("Variable blockpath '%s' is not a directory" % (blockpath,))

        hashtype = params['hashtype']
        try:
            hasher = newhasher(hashtype)
        except ValueError:
            msg = "Variable hashtype '%s' is not available from hashlib"
            raise ValueError(msg % (hashtype,))

        # The digest of the empty input doubles as the sentinel hash for
        # empty blocks (see block_retr).  b"" keeps this working on both
        # Python 2 (where it equals "") and Python 3.
        hasher.update(b"")
        emptyhash = hasher.digest()

        self.blocksize = blocksize
        self.blockpath = blockpath
        self.hashtype = hashtype
        self.hashlen = len(emptyhash)
        self.emptyhash = emptyhash

    def _get_rear_block(self, blkhash, create=0):
        """Return a ContextFile for the block with hash ``blkhash``,
        creating its sharded parent directories as needed.
        """
        # decode() keeps the path component as text on Python 3, where
        # hexlify returns bytes; it is harmless on Python 2.
        filename = hexlify(blkhash).decode('ascii')
        # Shard into <aa>/<bb>/<cc>/ so no single directory accumulates
        # too many entries.  ('dirname' avoids shadowing builtin 'dir'.)
        dirname = join(self.blockpath, filename[0:2], filename[2:4], filename[4:6])
        if not exists(dirname):
            # NOTE(review): exists+makedirs is racy under concurrent
            # writers; makedirs may raise if another process wins -- confirm
            # whether concurrent access is expected here.
            makedirs(dirname)
        return ContextFile(join(dirname, filename), create)

    def _check_rear_block(self, blkhash):
        """Return True if a block with hash ``blkhash`` exists on disk."""
        filename = hexlify(blkhash).decode('ascii')
        dirname = join(self.blockpath, filename[0:2], filename[2:4], filename[4:6])
        return exists(join(dirname, filename))

    def block_hash(self, data):
        """Hash a block of data.

        Trailing NUL bytes are stripped first, so blocks differing only
        in zero padding share a hash.
        """
        hasher = newhasher(self.hashtype)
        hasher.update(data.rstrip(b'\x00'))
        return hasher.digest()

    def block_ping(self, hashes):
        """Check hashes for existence and
           return those missing from block storage.
        """
        notfound = []
        append = notfound.append

        for h in hashes:
            # 'h not in notfound' de-duplicates repeated missing hashes.
            if h not in notfound and not self._check_rear_block(h):
                append(h)

        return notfound

    def block_retr(self, hashes):
        """Retrieve blocks from storage by their hashes.

        The empty-block hash short-circuits to an empty block without
        touching the store.  Retrieval stops at the first missing block,
        so the result may be shorter than ``hashes``.
        """
        blocksize = self.blocksize
        blocks = []
        append = blocks.append
        block = None

        for h in hashes:
            if h == self.emptyhash:
                append(b'')
                continue
            with self._get_rear_block(h, 0) as rbl:
                if not rbl:
                    break
                for block in rbl.sync_read_chunks(blocksize, 1, 0):
                    break # there should be just one block there
            if not block:
                break
            append(block)

        return blocks

    def block_stor(self, blocklist):
        """Store a bunch of blocks and return (hashes, missing).
           Hashes is a list of the hashes of the blocks,
           missing is a list of indices in that list indicating
           which blocks were missing from the store.
        """
        block_hash = self.block_hash
        hashlist = [block_hash(b) for b in blocklist]
        # Only write blocks not already present (content-addressed dedup).
        missing = [i for i, h in enumerate(hashlist) if not self._check_rear_block(h)]
        for i in missing:
            with self._get_rear_block(hashlist[i], 1) as rbl:
                rbl.sync_write(blocklist[i]) #XXX: verify?

        return hashlist, missing

    def block_delta(self, blkhash, offdata=()):
        """Construct and store a new block from a given block
           and a list of (offset, data) 'patches'. Return:
           (the hash of the new block, if the block already existed)
        """
        if not offdata:
            return None, None

        blocksize = self.blocksize
        block = self.block_retr((blkhash,))
        if not block:
            return None, None

        block = block[0]
        newblock = b''
        idx = 0
        size = 0
        trunc = 0
        for off, data in offdata:
            if not data:
                # An empty patch truncates the block at this offset.
                trunc = 1
                break
            newblock += block[idx:off] + data
            size += off - idx + len(data)
            if size >= blocksize:
                break
            off = size
            # NOTE(review): 'off' is overwritten on the next iteration and
            # 'idx' never advances, so multi-patch input re-copies from the
            # start of the block; preserved as-is -- confirm upstream intent.

        if not trunc:
            # Append the unmodified tail of the original block.
            newblock += block[size:len(block)]

        h, a = self.block_stor((newblock,))
        # 'a' holds indices of newly stored blocks; empty means the new
        # block already existed in the store.
        return h[0], 1 if a else 0

    def block_hash_file(self, openfile):
        """Return the list of hashes (hashes map)
           for the blocks in a buffered file.
           Helper method, does not affect store.
        """
        hashes = []
        append = hashes.append
        block_hash = self.block_hash

        for block in file_sync_read_chunks(openfile, self.blocksize, 1, 0):
            append(block_hash(block))

        return hashes

    def block_stor_file(self, openfile):
        """Read blocks from buffered file object and store them. Return:
           (bytes read, list of hashes, list of hashes that were missing)
        """
        blocksize = self.blocksize
        block_stor = self.block_stor
        hashlist = []
        hextend = hashlist.extend
        storedlist = []
        sextend = storedlist.extend
        lastsize = 0

        for block in file_sync_read_chunks(openfile, blocksize, 1, 0):
            hl, sl = block_stor((block,))
            hextend(hl)
            sextend(sl)
            lastsize = len(block)

        # All chunks but the last are exactly blocksize bytes.
        size = (len(hashlist) - 1) * blocksize + lastsize if hashlist else 0
        return size, hashlist, storedlist