~jelmer/brz/fix-c-extensions

Viewing changes to breezy/bzr/btree_index.py

  • Committer: Jelmer Vernooij
  • Date: 2017-07-23 22:06:41 UTC
  • mfrom: (6738 trunk)
  • mto: This revision was merged to the branch mainline in revision 6739.
  • Revision ID: jelmer@jelmer.uk-20170723220641-69eczax9bmv8d6kk
Merge trunk, address review comments.

@@ -17,7 +17,7 @@
 
 """B+Tree indices"""
 
-from __future__ import absolute_import
+from __future__ import absolute_import, division
 
 from ..lazy_import import lazy_import
 lazy_import(globals(), """
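
Note: the only functional change in this hunk is adding division to the __future__ import, so "/" performs true division on Python 2 as well as Python 3; later hunks rely on this and switch integer page arithmetic to "//". A minimal illustration (values chosen for the example, not taken from the module):

    from __future__ import division

    _PAGE_SIZE = 4096  # stand-in for the module constant

    assert 10000 / _PAGE_SIZE == 2.44140625   # "/" is now true division
    assert 10000 // _PAGE_SIZE == 2           # floor division must be explicit
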
@@ -84,7 +84,7 @@
         if self.nodes == 0:
             self.spool = BytesIO()
             # padded note:
-            self.spool.write("\x00" * _RESERVED_HEADER_BYTES)
+            self.spool.write(b"\x00" * _RESERVED_HEADER_BYTES)
         elif self.nodes == 1:
             # We got bigger than 1 node, switch to a temp file
             spool = tempfile.TemporaryFile(prefix='bzr-index-row-')
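
Note: the b"" prefix matters because self.spool is a BytesIO (or, once the row grows, a binary temporary file); on Python 3 writing a text string to a binary stream raises TypeError. A small sketch under that assumption:

    from io import BytesIO

    spool = BytesIO()
    reserved = 100  # stand-in value for _RESERVED_HEADER_BYTES
    spool.write(b"\x00" * reserved)    # works on Python 2 and 3
    # spool.write("\x00" * reserved)   # Python 3: TypeError: a bytes-like
    #                                  # object is required, not 'str'
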
@@ -323,7 +323,7 @@
                         optimize_for_size=optimize_for_size)
                     internal_row.writer.write(_INTERNAL_FLAG)
                     internal_row.writer.write(_INTERNAL_OFFSET +
-                        str(rows[pos + 1].nodes) + "\n")
+                        b"%d\n" % rows[pos + 1].nodes)
             # add a new leaf
             length = _PAGE_SIZE
             if rows[-1].nodes == 0:
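
Note: replacing str(...) + "\n" with b"%d\n" % ... keeps the data handed to the chunk writer as bytes end to end. Percent-formatting of bytes was reintroduced in Python 3.5 (PEP 461) and also works on Python 2, where bytes and str are the same type. A sketch, assuming _INTERNAL_OFFSET is a bytes prefix such as b"offset=":

    _INTERNAL_OFFSET = b"offset="   # assumed value, for illustration only

    nodes = 42
    line = _INTERNAL_OFFSET + b"%d\n" % nodes   # "%" binds tighter than "+"
    assert line == b"offset=42\n"
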
@@ -339,7 +339,7 @@
                 raise errors.BadIndexKey(string_key)
             # this key did not fit in the node:
             rows[-1].finish_node()
-            key_line = string_key + "\n"
+            key_line = string_key + b"\n"
             new_row = True
             for row in reversed(rows[:-1]):
                 # Mark the start of the next node in the node above. If it
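
Note: this line only type-checks if string_key is already bytes at this point; on Python 3, concatenating str and bytes raises TypeError instead of coercing silently. For example (the key value below is hypothetical):

    string_key = b"file-id\x00revision-id"   # assumed already bytes here

    key_line = string_key + b"\n"     # fine
    # key_line = string_key + "\n"    # Python 3: TypeError: can't concat str to bytes
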
@@ -367,7 +367,7 @@
                     optimize_for_size=self._optimize_for_size)
                 new_row.writer.write(_INTERNAL_FLAG)
                 new_row.writer.write(_INTERNAL_OFFSET +
-                    str(rows[1].nodes - 1) + "\n")
+                    b"%d\n" % (rows[1].nodes - 1))
                 new_row.writer.write(key_line)
             self._add_key(string_key, line, rows, allow_optimize=allow_optimize)
 
@@ -435,7 +435,7 @@
             node = row.spool.read(_PAGE_SIZE)
             result.write(node[reserved:])
             if len(node) == _PAGE_SIZE:
-                result.write("\x00" * (reserved - position))
+                result.write(b"\x00" * (reserved - position))
             position = 0 # Only the root row actually has an offset
             copied_len = osutils.pumpfile(row.spool, result)
             if copied_len != (row.nodes - 1) * _PAGE_SIZE:
@@ -626,14 +626,14 @@
     def __init__(self, bytes):
         """Parse bytes to create an internal node object."""
         # splitlines mangles the \r delimiters.. don't use it.
-        self.keys = self._parse_lines(bytes.split('\n'))
+        self.keys = self._parse_lines(bytes.split(b'\n'))
 
     def _parse_lines(self, lines):
         nodes = []
         self.offset = int(lines[1][7:])
         as_st = static_tuple.StaticTuple.from_sequence
         for line in lines[2:]:
-            if line == '':
+            if line == b'':
                 break
             # GZ 2017-05-24: Used to intern() each chunk of line as well, need
             # to recheck performance and perhaps adapt StaticTuple to adjust.
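
Note: both changes in this hunk follow from the node payload now actually being bytes: on Python 3, bytes.split() requires a bytes separator, and the resulting chunks never compare equal to the text literal '', so the end-of-content check has to use b''. For example (the payload below is a simplified stand-in for the real node format):

    data = b"type=internal\noffset=0\nsome-key\n"

    lines = data.split(b"\n")    # splitting on "\n" would raise TypeError on Python 3
    assert lines[-1] == b""      # comparing against '' would always be False
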
@@ -688,6 +688,9 @@
         self._row_lengths = None
         self._row_offsets = None # Start of each row, [-1] is the end
 
+    def __hash__(self):
+        return id(self)
+
     def __eq__(self, other):
         """Equal when self and other were created with the same parameters."""
         return (
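
Note: the new __hash__ is needed because, on Python 3, a class that defines __eq__ without defining __hash__ gets __hash__ set to None and its instances become unhashable, which would break using index objects as set members or dict keys. Returning id(self) restores the default identity-based hashing. A minimal illustration with hypothetical classes:

    class WithoutHash(object):
        def __eq__(self, other):
            return isinstance(other, WithoutHash)

    class WithHash(WithoutHash):
        def __hash__(self):
            return id(self)

    # hash(WithoutHash()) raises TypeError on Python 3 ("unhashable type"),
    # because defining __eq__ implicitly sets __hash__ to None.
    hash(WithHash())       # fine: identity-based hash
    {WithHash(): "ok"}     # usable as a dict key again
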
@@ -734,8 +737,7 @@
         pages fit in that length.
         """
         recommended_read = self._transport.recommended_page_size()
-        recommended_pages = int(math.ceil(recommended_read /
-                                          float(_PAGE_SIZE)))
+        recommended_pages = int(math.ceil(recommended_read / _PAGE_SIZE))
         return recommended_pages
 
     def _compute_total_pages_in_index(self):
@@ -752,7 +754,7 @@
             return self._row_offsets[-1]
         # This is the number of pages as defined by the size of the index. They
         # should be indentical.
-        total_pages = int(math.ceil(self._size / float(_PAGE_SIZE)))
+        total_pages = int(math.ceil(self._size / _PAGE_SIZE))
         return total_pages
 
     def _expand_offsets(self, offsets):
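
Note: this hunk and the one above it drop the explicit float() cast; with division imported at the top of the module, "/" already returns a float, and int(math.ceil(...)) rounds up exactly as before. A quick equivalence check (size value illustrative):

    from __future__ import division
    import math

    _PAGE_SIZE = 4096   # stand-in for the module constant
    size = 10000

    assert int(math.ceil(size / float(_PAGE_SIZE))) == 3   # old form
    assert int(math.ceil(size / _PAGE_SIZE)) == 3          # new form, same result
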
@@ -1516,7 +1518,7 @@
                 node = _InternalNode(bytes)
             else:
                 raise AssertionError("Unknown node type for %r" % bytes)
-            yield offset / _PAGE_SIZE, node
+            yield offset // _PAGE_SIZE, node
 
     def _signature(self):
         """The file signature for this index type."""