~ubuntu-branches/ubuntu/natty/bzr/natty-proposed

« back to all changes in this revision

Viewing changes to bzrlib/repofmt/groupcompress_repo.py

  • Committer: Bazaar Package Importer
  • Author(s): Jelmer Vernooij
  • Date: 2010-08-07 00:54:52 UTC
  • mfrom: (1.4.8 upstream)
  • Revision ID: james.westby@ubuntu.com-20100807005452-g4zb99ezl3xn44r4
Tags: 2.2.0-1
* New upstream release.
 + Adds support for setting timestamps to originating revisions.
   Closes: #473450
 + Removes remaining string exception. Closes: #585193, LP: #586926
 + Add C extension to work around Python issue 1628205. LP: #583941,
   Closes: #577110
 + Avoids showing progress bars when --quiet is used. Closes: #542105,
   LP: #320035
 + No longer creates ~/.bazaar as root when run under sudo. LP: #376388
 + 'bzr commit' now supports -p as an alternative to --show-diff. LP: #571467
 + 'bzr add' no longer adds .THIS/.BASE/.THEIRS files unless
   explicitly requested. LP: #322767
 + When parsing patch files, Bazaar now supports diff lines before each
   patch. LP: #502076
 + WorkingTrees no longer require using signal.signal, so they can
   be used in a threaded environment. LP: #521989
 + An assertion error is no longer triggered when pushing to a pre-1.6
   Bazaar server. LP: #528041
* Bump standards version to 3.9.1.

Show diffs side-by-side

added added

removed removed

Lines of Context:
29
29
    knit,
30
30
    osutils,
31
31
    pack,
32
 
    remote,
33
32
    revision as _mod_revision,
34
33
    trace,
35
34
    ui,
263
262
        remaining_keys = set(keys)
264
263
        counter = [0]
265
264
        if self._gather_text_refs:
266
 
            bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
267
265
            self._text_refs = set()
268
266
        def _get_referenced_stream(root_keys, parse_leaf_nodes=False):
269
267
            cur_keys = root_keys
290
288
                    # Store is None, because we know we have a LeafNode, and we
291
289
                    # just want its entries
292
290
                    for file_id, bytes in node.iteritems(None):
293
 
                        name_utf8, file_id, revision_id = bytes_to_info(bytes)
294
 
                        self._text_refs.add((file_id, revision_id))
 
291
                        self._text_refs.add(chk_map._bytes_to_text_key(bytes))
295
292
                def next_stream():
296
293
                    stream = source_vf.get_record_stream(cur_keys,
297
294
                                                         'as-requested', True)
648
645
        chk_diff = chk_map.iter_interesting_nodes(
649
646
            chk_bytes_no_fallbacks, root_key_info.interesting_root_keys,
650
647
            root_key_info.uninteresting_root_keys)
651
 
        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
652
648
        text_keys = set()
653
649
        try:
654
 
            for record in _filter_text_keys(chk_diff, text_keys, bytes_to_info):
 
650
            for record in _filter_text_keys(chk_diff, text_keys,
 
651
                                            chk_map._bytes_to_text_key):
655
652
                pass
656
653
        except errors.NoSuchRevision, e:
657
654
            # XXX: It would be nice if we could give a more precise error here.
865
862
        if basis_inv is None:
866
863
            if basis_revision_id == _mod_revision.NULL_REVISION:
867
864
                new_inv = self._create_inv_from_null(delta, new_revision_id)
 
865
                if new_inv.root_id is None:
 
866
                    raise errors.RootMissing()
868
867
                inv_lines = new_inv.to_lines()
869
868
                return self._inventory_add_lines(new_revision_id, parents,
870
869
                    inv_lines, check_content=False), new_inv
882
881
            if basis_tree is not None:
883
882
                basis_tree.unlock()
884
883
 
885
 
    def deserialise_inventory(self, revision_id, bytes):
 
884
    def _deserialise_inventory(self, revision_id, bytes):
886
885
        return inventory.CHKInventory.deserialise(self.chk_bytes, bytes,
887
886
            (revision_id,))
888
887
 
904
903
    def _iter_inventory_xmls(self, revision_ids, ordering):
905
904
        # Without a native 'xml' inventory, this method doesn't make sense.
906
905
        # However older working trees, and older bundles want it - so we supply
907
 
        # it allowing get_inventory_xml to work. Bundles currently use the
 
906
        # it allowing _get_inventory_xml to work. Bundles currently use the
908
907
        # serializer directly; this also isn't ideal, but there isn't an xml
909
908
        # iteration interface offered at all for repositories. We could make
910
909
        # _iter_inventory_xmls be part of the contract, even if kept private.
1088
1087
                uninteresting_root_keys.add(inv.id_to_entry.key())
1089
1088
                uninteresting_pid_root_keys.add(
1090
1089
                    inv.parent_id_basename_to_file_id.key())
1091
 
        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
1092
1090
        chk_bytes = self.from_repository.chk_bytes
1093
1091
        def _filter_id_to_entry():
1094
1092
            interesting_nodes = chk_map.iter_interesting_nodes(chk_bytes,
1095
1093
                        self._chk_id_roots, uninteresting_root_keys)
1096
1094
            for record in _filter_text_keys(interesting_nodes, self._text_keys,
1097
 
                    bytes_to_info):
 
1095
                    chk_map._bytes_to_text_key):
1098
1096
                if record is not None:
1099
1097
                    yield record
1100
1098
            # Consumed
1110
1108
        yield 'chk_bytes', _get_parent_id_basename_to_file_id_pages()
1111
1109
 
1112
1110
    def get_stream(self, search):
 
1111
        def wrap_and_count(pb, rc, stream):
 
1112
            """Yield records from stream while showing progress."""
 
1113
            count = 0
 
1114
            for record in stream:
 
1115
                if count == rc.STEP:
 
1116
                    rc.increment(count)
 
1117
                    pb.update('Estimate', rc.current, rc.max)
 
1118
                    count = 0
 
1119
                count += 1
 
1120
                yield record
 
1121
 
1113
1122
        revision_ids = search.get_keys()
 
1123
        pb = ui.ui_factory.nested_progress_bar()
 
1124
        rc = self._record_counter
 
1125
        self._record_counter.setup(len(revision_ids))
1114
1126
        for stream_info in self._fetch_revision_texts(revision_ids):
1115
 
            yield stream_info
 
1127
            yield (stream_info[0],
 
1128
                wrap_and_count(pb, rc, stream_info[1]))
1116
1129
        self._revision_keys = [(rev_id,) for rev_id in revision_ids]
1117
1130
        self.from_repository.revisions.clear_cache()
1118
1131
        self.from_repository.signatures.clear_cache()
1119
 
        yield self._get_inventory_stream(self._revision_keys)
 
1132
        s = self._get_inventory_stream(self._revision_keys)
 
1133
        yield (s[0], wrap_and_count(pb, rc, s[1]))
1120
1134
        self.from_repository.inventories.clear_cache()
1121
1135
        # TODO: The keys to exclude might be part of the search recipe
1122
1136
        # For now, exclude all parents that are at the edge of ancestry, for
1125
1139
        parent_keys = from_repo._find_parent_keys_of_revisions(
1126
1140
                        self._revision_keys)
1127
1141
        for stream_info in self._get_filtered_chk_streams(parent_keys):
1128
 
            yield stream_info
 
1142
            yield (stream_info[0], wrap_and_count(pb, rc, stream_info[1]))
1129
1143
        self.from_repository.chk_bytes.clear_cache()
1130
 
        yield self._get_text_stream()
 
1144
        s = self._get_text_stream()
 
1145
        yield (s[0], wrap_and_count(pb, rc, s[1]))
1131
1146
        self.from_repository.texts.clear_cache()
 
1147
        pb.update('Done', rc.max, rc.max)
 
1148
        pb.finished()
1132
1149
 
1133
1150
    def get_stream_for_missing_keys(self, missing_keys):
1134
1151
        # missing keys can only occur when we are byte copying and not
1188
1205
    return result
1189
1206
 
1190
1207
 
1191
 
def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_info):
 
1208
def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_text_key):
1192
1209
    """Iterate the result of iter_interesting_nodes, yielding the records
1193
1210
    and adding to text_keys.
1194
1211
    """
 
1212
    text_keys_update = text_keys.update
1195
1213
    for record, items in interesting_nodes_iterable:
1196
 
        for name, bytes in items:
1197
 
            # Note: we don't care about name_utf8, because groupcompress repos
1198
 
            # are always rich-root, so there are no synthesised root records to
1199
 
            # ignore.
1200
 
            _, file_id, revision_id = bytes_to_info(bytes)
1201
 
            file_id = intern(file_id)
1202
 
            revision_id = intern(revision_id)
1203
 
            text_keys.add(StaticTuple(file_id, revision_id).intern())
 
1214
        text_keys_update([bytes_to_text_key(b) for n,b in items])
1204
1215
        yield record
1205
1216
 
1206
1217