~ubuntu-branches/ubuntu/trusty/swift/trusty-updates

« back to all changes in this revision

Viewing changes to swift/obj/server.py

  • Committer: Package Import Robot
  • Author(s): Chuck Short, Soren Hansen, Chuck Short
  • Date: 2012-09-07 19:02:36 UTC
  • mfrom: (1.2.12)
  • Revision ID: package-import@ubuntu.com-20120907190236-fqrmbzm7v6zivs8d
Tags: 1.7.0-0ubuntu1
[ Soren Hansen ]
* Update debian/watch to account for symbolically named tarballs and
  use newer URL.
* Run unit tests at build time.
* Fix Launchpad URLs in debian/watch.

[ Chuck Short ]
* New upstream release
* debian/control: Add python-mox as a build dep
* debian/rules: Don't fail if testsuite fails.

Show diffs side-by-side

added added

removed removed

Lines of Context:
44
44
    check_float, check_utf8
45
45
from swift.common.exceptions import ConnectionTimeout, DiskFileError, \
46
46
    DiskFileNotExist
47
 
from swift.obj.replicator import tpooled_get_hashes, invalidate_hash, \
48
 
    quarantine_renamer
 
47
from swift.obj.replicator import tpool_reraise, invalidate_hash, \
 
48
    quarantine_renamer, get_hashes
49
49
from swift.common.http import is_success, HTTPInsufficientStorage, \
50
50
    HTTPClientDisconnect
51
51
 
230
230
                if verify_file:
231
231
                    self._handle_close_quarantine()
232
232
            except (Exception, Timeout), e:
233
 
                import traceback
234
233
                self.logger.error(_('ERROR DiskFile %(data_file)s in '
235
234
                     '%(data_dir)s close failure: %(exc)s : %(stack)'),
236
235
                     {'exc': e, 'stack': ''.join(traceback.format_stack()),
369
368
        self.keep_cache_size = int(conf.get('keep_cache_size', 5242880))
370
369
        self.keep_cache_private = \
371
370
            conf.get('keep_cache_private', 'false').lower() in TRUE_VALUES
372
 
        self.log_requests = conf.get('log_requests', 't')[:1].lower() == 't'
 
371
        self.log_requests = \
 
372
            conf.get('log_requests', 'true').lower() in TRUE_VALUES
373
373
        self.max_upload_time = int(conf.get('max_upload_time', 86400))
374
374
        self.slow = int(conf.get('slow', 0))
375
375
        self.bytes_per_sync = int(conf.get('mb_per_sync', 512)) * 1024 * 1024
471
471
        :param headers_in: dictionary of headers from the original request
472
472
        :param objdevice: device name that the object is in
473
473
        """
 
474
        # Quick cap that will work from now until Sat Nov 20 17:46:39 2286
 
475
        # At that time, Swift will be so popular and pervasive I will have
 
476
        # created income for thousands of future programmers.
 
477
        delete_at = max(min(delete_at, 9999999999), 0)
474
478
        host = partition = contdevice = None
475
479
        headers_out = {'x-timestamp': headers_in['x-timestamp'],
476
480
                       'x-trans-id': headers_in.get('x-trans-id', '-')}
586
590
        last_sync = 0
587
591
        with file.mkstemp() as (fd, tmppath):
588
592
            if 'content-length' in request.headers:
589
 
                fallocate(fd, int(request.headers['content-length']))
 
593
                try:
 
594
                    fallocate(fd, int(request.headers['content-length']))
 
595
                except OSError:
 
596
                    return HTTPInsufficientStorage(drive=device,
 
597
                                                   request=request)
590
598
            reader = request.environ['wsgi.input'].read
591
599
            for chunk in iter(lambda: reader(self.network_chunk_size), ''):
592
600
                upload_size += len(chunk)
860
868
        if not os.path.exists(path):
861
869
            mkdirs(path)
862
870
        suffixes = suffix.split('-') if suffix else []
863
 
        _junk, hashes = tpool.execute(tpooled_get_hashes, path,
864
 
                                      recalculate=suffixes)
865
 
        # See tpooled_get_hashes "Hack".
866
 
        if isinstance(hashes, BaseException):
867
 
            self.logger.increment('REPLICATE.errors')
868
 
            raise hashes
 
871
        _junk, hashes = tpool_reraise(get_hashes, path, recalculate=suffixes)
869
872
        self.logger.timing_since('REPLICATE.timing', start_time)
870
873
        return Response(body=pickle.dumps(hashes))
871
874