~zfilenet/zfilenet/release-0.1

Viewing changes to zfilenet/indexserver/rpcapi.py

  • Committer: Ross Light
  • Date: 2008-11-17 01:26:46 UTC
  • Revision ID: rlight2@gmail.com-20081117012646-u1onewq7fpkn3h16
Fixed JSON-RPC upload overflow bug
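
In outline: upload.write previously wrapped each base64-decoded JSON-RPC
payload in a single new blob, with splitting left as a TODO, so one
oversized call could overflow the blob servers' size limit. This revision
buffers decoded data in a per-upload cStringIO and flushes it in
MAX_BLOB_SIZE chunks through a new _flush_blobs helper, which upload.close
invokes with force=True to drain the remainder. Short illustrative
sketches (not part of the commit) follow the relevant hunks below.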

=== modified file 'zfilenet/indexserver/rpcapi.py'
@@ -25,6 +25,7 @@
 
 from __future__ import absolute_import
 from base64 import b64decode, b64encode
+from cStringIO import StringIO
 import functools
 from hashlib import sha1
 import logging
@@ -35,6 +36,7 @@
 
 from . import blobmanager, errors, index
 from .. import discovery, util
+from ..blobserver import MAX_BLOB_SIZE
 
 __author__ = 'Ross Light'
 __date__ = 'June 27, 2008'

@@ -238,7 +240,8 @@
             node.clear()
         writer = self.blob_server_list.get_writer()
         self.uploads[node.uuid] = dict(blobs=[], sha1_hash=sha1(), length=0,
-                                       writer=writer,)
+                                       writer=writer,
+                                       transfer_buffer=StringIO(),)
         return str(node.uuid)
     
     @expose('upload.write')
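
For orientation, a minimal sketch of the per-upload state this hunk now
builds. StubWriter is a hypothetical stand-in for whatever
blob_server_list.get_writer() returns; only its write_blob method is
assumed here.

    from cStringIO import StringIO
    from hashlib import sha1

    class StubWriter(object):
        """Hypothetical stand-in for the index server's blob writer."""
        def write_blob(self, blob_id, data):
            print 'blob %s: %d bytes' % (blob_id, len(data))

    upload = dict(blobs=[],                    # blob IDs, in file order
                  sha1_hash=sha1(),            # running digest of decoded data
                  length=0,                    # total decoded bytes received
                  writer=StubWriter(),         # sends blobs to the blob servers
                  transfer_buffer=StringIO())  # new: bytes awaiting a full chunk
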
@@ -249,14 +252,27 @@
             upload = self.uploads[file_id]
         except KeyError:
             raise pinet.jsonrpc.RPCError(1, "Upload does not exist")
-        # Write blob to blob servers
-        new_blob_id = uuid4()
+        # Update data
         decoded_data = b64decode(data)
+        upload['transfer_buffer'].write(decoded_data)
         upload['sha1_hash'].update(decoded_data)
         upload['length'] += len(decoded_data)
-        upload['blobs'].append(new_blob_id)
-        # TODO: Split into chunks
-        upload['writer'].write_blob(new_blob_id, decoded_data)
+        self._flush_blobs(upload)
+    
+    @staticmethod
+    def _flush_blobs(upload, force=False):
+        transfer_buffer = upload['transfer_buffer']
+        while (transfer_buffer.tell() > 0 and
+               (force or transfer_buffer.tell() >= MAX_BLOB_SIZE)):
+            new_blob_id = uuid4()
+            new_blob_data = transfer_buffer.getvalue()[:MAX_BLOB_SIZE]
+            remaining_data = transfer_buffer.getvalue()[MAX_BLOB_SIZE:]
+            # Write blob to servers
+            upload['blobs'].append(new_blob_id)
+            upload['writer'].write_blob(new_blob_id, new_blob_data)
+            # Update buffer
+            transfer_buffer.truncate(0)
+            transfer_buffer.write(remaining_data)
     
     @expose('upload.close')
     @handle_index_errors
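
A self-contained simulation of the flushing loop added above, with a tiny
stand-in MAX_BLOB_SIZE so the chunking is visible (the real limit comes
from ..blobserver). It relies on the same cStringIO detail as the commit:
truncate(0) also rewinds the file position to 0.

    from cStringIO import StringIO
    from uuid import uuid4

    MAX_BLOB_SIZE = 4        # stand-in; the real value comes from ..blobserver
    blobs = []               # (blob_id, data) pairs, standing in for write_blob

    def flush_blobs(transfer_buffer, force=False):
        # Mirrors _flush_blobs: emit complete chunks; with force, drain the rest.
        while (transfer_buffer.tell() > 0 and
               (force or transfer_buffer.tell() >= MAX_BLOB_SIZE)):
            data = transfer_buffer.getvalue()
            blobs.append((uuid4(), data[:MAX_BLOB_SIZE]))
            # cStringIO's truncate(0) moves the position back to 0, so the
            # leftover bytes can be written back at the start of the buffer.
            transfer_buffer.truncate(0)
            transfer_buffer.write(data[MAX_BLOB_SIZE:])

    buf = StringIO()
    buf.write('abcdefghij')            # one oversized upload.write payload
    flush_blobs(buf)                   # emits 'abcd', 'efgh'; 'ij' stays buffered
    flush_blobs(buf, force=True)       # the upload.close path: emits trailing 'ij'
    print [data for _, data in blobs]  # ['abcd', 'efgh', 'ij']

So a single upload.write carrying more than MAX_BLOB_SIZE bytes now yields
several blobs, and a short trailing piece is held back until more data
arrives or upload.close forces it out.
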
@@ -268,6 +284,7 @@
         except KeyError:
             raise pinet.jsonrpc.RPCError(1, "Upload does not exist")
         upload_file = index.IndexObject.by_uuid(session, file_id)
+        self._flush_blobs(upload, force=True)
         upload_file.length = upload['length']
         upload_file.sha1_hash = upload['sha1_hash'].digest()
         upload_file.blob_list = upload['blobs']
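
The force=True flush at close is what keeps the recorded metadata honest:
every buffered byte becomes a blob before length, sha1_hash, and blob_list
are written. A small check of that invariant, reusing the chunks from the
simulation above:

    from hashlib import sha1

    chunks = ['abcd', 'efgh', 'ij']       # blobs from the 10-byte example
    length = 10                           # upload['length'] from upload.write
    digest = sha1('abcdefghij').digest()  # running upload['sha1_hash']

    # Without the forced flush, 'ij' would still sit in the buffer and both
    # checks below would fail for the data actually stored.
    assert sum(len(c) for c in chunks) == length
    assert sha1(''.join(chunks)).digest() == digest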