~ubuntu-branches/ubuntu/natty/bzr/natty-proposed

Viewing changes to bzrlib/repofmt/pack_repo.py

  • Committer: Bazaar Package Importer
  • Author(s): Jelmer Vernooij
  • Date: 2010-08-07 00:54:52 UTC
  • mfrom: (1.4.8 upstream)
  • Revision ID: james.westby@ubuntu.com-20100807005452-g4zb99ezl3xn44r4
Tags: 2.2.0-1
* New upstream release.
 + Adds support for setting timestamps to originating revisions.
   Closes: #473450
 + Removes remaining string exception. Closes: #585193, LP: #586926
 + Add C extension to work around Python issue 1628205. LP: #583941,
   Closes: #577110
 + Avoids showing progress bars when --quiet is used. Closes: #542105,
   LP: #320035
 + No longer creates ~/.bazaar as root when run under sudo. LP: #376388
 + 'bzr commit' now supports -p as alternative for --show-diff. LP: #571467
 + 'bzr add' no longer adds .THIS/.BASE/.THEIRS files unless
   explicitly requested. LP: #322767
 + When parsing patch files, Bazaar now supports diff lines before each
   patch. LP: #502076
 + WorkingTrees no longer require using signal.signal, so they can be
   used in a threaded environment (see the sketch after this list).
   LP: #521989
 + An assertion error is no longer triggered when pushing to a pre-1.6
   Bazaar server. LP: #528041
* Bump standards version to 3.9.1.
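
As a worked illustration of the threading item above: with 2.2.0, WorkingTree no longer needs signal.signal, so read-only use from a worker thread becomes possible. The sketch below is a minimal example under that assumption; the branch location and the iter_changes comparison are illustrative and not taken from this upload.

# Illustrative sketch only (Python 2 / bzrlib): opens a working tree in a
# worker thread and prints the changes against its basis revision.
# '/path/to/branch' is a placeholder path.
import threading
from bzrlib.workingtree import WorkingTree

def report_changes(location):
    tree = WorkingTree.open(location)
    tree.lock_read()
    try:
        basis = tree.basis_tree()
        basis.lock_read()
        try:
            # Read-only comparison of the working tree against its basis tree.
            for change in tree.iter_changes(basis):
                print change
        finally:
            basis.unlock()
    finally:
        tree.unlock()

worker = threading.Thread(target=report_changes, args=('/path/to/branch',))
worker.start()
worker.join()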

--- bzrlib/repofmt/pack_repo.py
+++ bzrlib/repofmt/pack_repo.py
@@ -64,11 +64,13 @@
     GraphIndex,
     InMemoryGraphIndex,
     )
+from bzrlib.lock import LogicalLockResult
 from bzrlib.repofmt.knitrepo import KnitRepository
 from bzrlib.repository import (
     CommitBuilder,
     MetaDirRepositoryFormat,
     RepositoryFormat,
+    RepositoryWriteLockResult,
     RootCommitBuilder,
     StreamSource,
     )
@@ -587,26 +589,6 @@
                                              flush_func=flush_func)
         self.add_callback = None
 
-    def replace_indices(self, index_to_pack, indices):
-        """Replace the current mappings with fresh ones.
-
-        This should probably not be used eventually, rather incremental add and
-        removal of indices. It has been added during refactoring of existing
-        code.
-
-        :param index_to_pack: A mapping from index objects to
-            (transport, name) tuples for the pack file data.
-        :param indices: A list of indices.
-        """
-        # refresh the revision pack map dict without replacing the instance.
-        self.index_to_pack.clear()
-        self.index_to_pack.update(index_to_pack)
-        # XXX: API break - clearly a 'replace' method would be good?
-        self.combined_index._indices[:] = indices
-        # the current add nodes callback for the current writable index if
-        # there is one.
-        self.add_callback = None
-
     def add_index(self, index, pack):
         """Add index to the aggregate, which is an index for Pack pack.
 
@@ -619,7 +601,7 @@
         # expose it to the index map
         self.index_to_pack[index] = pack.access_tuple()
         # put it at the front of the linear index list
-        self.combined_index.insert_index(0, index)
+        self.combined_index.insert_index(0, index, pack.name)
 
     def add_writable_index(self, index, pack):
         """Add an index which is able to have data added to it.
@@ -645,6 +627,7 @@
         self.data_access.set_writer(None, None, (None, None))
         self.index_to_pack.clear()
         del self.combined_index._indices[:]
+        del self.combined_index._index_names[:]
         self.add_callback = None
 
     def remove_index(self, index):
@@ -653,7 +636,9 @@
         :param index: An index from the pack parameter.
         """
         del self.index_to_pack[index]
-        self.combined_index._indices.remove(index)
+        pos = self.combined_index._indices.index(index)
+        del self.combined_index._indices[pos]
+        del self.combined_index._index_names[pos]
         if (self.add_callback is not None and
             getattr(index, 'add_nodes', None) == self.add_callback):
             self.add_callback = None
@@ -1415,11 +1400,20 @@
         self.inventory_index = AggregateIndex(self.reload_pack_names, flush)
         self.text_index = AggregateIndex(self.reload_pack_names, flush)
         self.signature_index = AggregateIndex(self.reload_pack_names, flush)
+        all_indices = [self.revision_index, self.inventory_index,
+                self.text_index, self.signature_index]
         if use_chk_index:
             self.chk_index = AggregateIndex(self.reload_pack_names, flush)
+            all_indices.append(self.chk_index)
         else:
             # used to determine if we're using a chk_index elsewhere.
             self.chk_index = None
+        # Tell all the CombinedGraphIndex objects about each other, so they can
+        # share hints about which pack names to search first.
+        all_combined = [agg_idx.combined_index for agg_idx in all_indices]
+        for combined_idx in all_combined:
+            combined_idx.set_sibling_indices(
+                set(all_combined).difference([combined_idx]))
         # resumed packs
         self._resumed_packs = []
 
@@ -1568,7 +1562,7 @@
         """Is the collection already packed?"""
         return not (self.repo._format.pack_compresses or (len(self._names) > 1))
 
-    def pack(self, hint=None):
+    def pack(self, hint=None, clean_obsolete_packs=False):
         """Pack the pack collection totally."""
         self.ensure_loaded()
         total_packs = len(self._names)
@@ -1590,6 +1584,9 @@
                 pack_operations[-1][1].append(pack)
         self._execute_pack_operations(pack_operations, OptimisingPacker)
 
+        if clean_obsolete_packs:
+            self._clear_obsolete_packs()
+
     def plan_autopack_combinations(self, existing_packs, pack_distribution):
         """Plan a pack operation.
 
@@ -2345,6 +2342,10 @@
         return self._write_lock_count
 
     def lock_write(self, token=None):
+        """Lock the repository for writes.
+
+        :return: A bzrlib.repository.RepositoryWriteLockResult.
+        """
         locked = self.is_locked()
         if not self._write_lock_count and locked:
             raise errors.ReadOnlyError(self)
@@ -2359,8 +2360,13 @@
                 # Writes don't affect fallback repos
                 repo.lock_read()
             self._refresh_data()
+        return RepositoryWriteLockResult(self.unlock, None)
 
     def lock_read(self):
+        """Lock the repository for reads.
+
+        :return: A bzrlib.lock.LogicalLockResult.
+        """
         locked = self.is_locked()
         if self._write_lock_count:
             self._write_lock_count += 1
@@ -2373,6 +2379,7 @@
             for repo in self._fallback_repositories:
                 repo.lock_read()
             self._refresh_data()
+        return LogicalLockResult(self.unlock)
 
     def leave_lock_in_place(self):
         # not supported - raise an error
@@ -2383,13 +2390,13 @@
         raise NotImplementedError(self.dont_leave_lock_in_place)
 
     @needs_write_lock
-    def pack(self, hint=None):
+    def pack(self, hint=None, clean_obsolete_packs=False):
        """Compress the data within the repository.
 
         This will pack all the data to a single pack. In future it may
         recompress deltas or do other such expensive operations.
         """
-        self._pack_collection.pack(hint=hint)
+        self._pack_collection.pack(hint=hint, clean_obsolete_packs=clean_obsolete_packs)
 
     @needs_write_lock
     def reconcile(self, other=None, thorough=False):
@@ -2551,7 +2558,9 @@
         utf8_files = [('format', self.get_format_string())]
 
         self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
-        return self.open(a_bzrdir=a_bzrdir, _found=True)
+        repository = self.open(a_bzrdir=a_bzrdir, _found=True)
+        self._run_post_repo_init_hooks(repository, a_bzrdir, shared)
+        return repository
 
     def open(self, a_bzrdir, _found=False, _override_transport=None):
         """See RepositoryFormat.open().
@@ -2620,6 +2629,7 @@
     repository_class = KnitPackRepository
     _commit_builder_class = PackRootCommitBuilder
     rich_root_data = True
+    experimental = True
     supports_tree_reference = True
     @property
     def _serializer(self):
@@ -2893,6 +2903,7 @@
     repository_class = KnitPackRepository
     _commit_builder_class = PackRootCommitBuilder
    rich_root_data = True
+    experimental = True
     supports_tree_reference = True
     supports_external_lookups = True
     # What index classes to use
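
Taken together, the locking and packing hunks above change the public entry points of KnitPackRepository: lock_write() now returns a RepositoryWriteLockResult, lock_read() returns a LogicalLockResult, and pack() accepts a clean_obsolete_packs flag that is forwarded to the pack collection. Below is a minimal sketch of calling code under these changes; the repository location and the use of BzrDir.open are illustrative assumptions, not part of this diff.

# Illustrative sketch only: exercises the entry points changed in this diff.
# 'repo_path' is a placeholder for a local pack-format repository.
from bzrlib.bzrdir import BzrDir

repo = BzrDir.open('repo_path').open_repository()

lock_result = repo.lock_write()     # now a RepositoryWriteLockResult
try:
    # New keyword argument: also remove already-obsoleted pack files
    # once the repack completes.
    repo.pack(clean_obsolete_packs=True)
finally:
    lock_result.unlock()            # the result object carries the unlock callable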