~lifeless/ubuntu/lucid/bzr/2.1.2-sru


Viewing changes to bzrlib/repofmt/pack_repo.py

  • Committer: Bazaar Package Importer
  • Author(s): Jelmer Vernooij
  • Date: 2010-02-17 17:47:40 UTC
  • mfrom: (1.5.2 upstream)
  • mto: This revision was merged to the branch mainline in revision 13.
  • Revision ID: james.westby@ubuntu.com-20100217174740-p1x5dd5uwvxtg9y6
Tags: upstream-2.1.0
Import upstream version 2.1.0

@@ -24,6 +24,7 @@
 
 from bzrlib import (
     chk_map,
+    cleanup,
     debug,
     graph,
     osutils,
@@ -54,7 +55,7 @@
     revision as _mod_revision,
     )
 
-from bzrlib.decorators import needs_write_lock
+from bzrlib.decorators import needs_write_lock, only_raises
 from bzrlib.btree_index import (
     BTreeGraphIndex,
     BTreeBuilder,
@@ -73,6 +74,7 @@
     )
 from bzrlib.trace import (
     mutter,
+    note,
     warning,
     )
 
@@ -645,11 +647,10 @@
         del self.combined_index._indices[:]
         self.add_callback = None
 
-    def remove_index(self, index, pack):
+    def remove_index(self, index):
         """Remove index from the indices used to answer queries.
 
         :param index: An index from the pack parameter.
-        :param pack: A Pack instance.
         """
         del self.index_to_pack[index]
         self.combined_index._indices.remove(index)
@@ -1116,7 +1117,7 @@
             iterator is a tuple with:
             index, readv_vector, node_vector. readv_vector is a list ready to
             hand to the transport readv method, and node_vector is a list of
-            (key, eol_flag, references) for the the node retrieved by the
+            (key, eol_flag, references) for the node retrieved by the
             matching readv_vector.
         """
         # group by pack so we do one readv per pack
@@ -1422,6 +1423,9 @@
         # resumed packs
         self._resumed_packs = []
 
+    def __repr__(self):
+        return '%s(%r)' % (self.__class__.__name__, self.repo)
+
     def add_pack_to_memory(self, pack):
         """Make a Pack object available to the repository to satisfy queries.
 
@@ -1541,10 +1545,11 @@
                 self._remove_pack_from_memory(pack)
         # record the newly available packs and stop advertising the old
         # packs
-        result = self._save_pack_names(clear_obsolete_packs=True)
-        # Move the old packs out of the way now they are no longer referenced.
-        for revision_count, packs in pack_operations:
-            self._obsolete_packs(packs)
+        to_be_obsoleted = []
+        for _, packs in pack_operations:
+            to_be_obsoleted.extend(packs)
+        result = self._save_pack_names(clear_obsolete_packs=True,
+                                       obsolete_packs=to_be_obsoleted)
         return result
 
     def _flush_new_pack(self):
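The hunk above moves the _obsolete_packs call out of this method: the packs to be obsoleted are collected first and handed to _save_pack_names, so they are only moved aside after the new pack-names file has been written (see the _save_pack_names hunks below). A runnable toy of the collection step, using hypothetical pack names in place of real Pack objects:

    # pack_operations pairs a revision count with the packs combined by
    # that operation; only the packs matter here, so the count is
    # discarded into '_'.
    pack_operations = [(10, ['pack-a', 'pack-b']), (3, ['pack-c'])]
    to_be_obsoleted = []
    for _, packs in pack_operations:
        to_be_obsoleted.extend(packs)
    assert to_be_obsoleted == ['pack-a', 'pack-b', 'pack-c']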
@@ -1784,8 +1789,13 @@
         :param return: None.
         """
         for pack in packs:
-            pack.pack_transport.rename(pack.file_name(),
-                '../obsolete_packs/' + pack.file_name())
+            try:
+                pack.pack_transport.rename(pack.file_name(),
+                    '../obsolete_packs/' + pack.file_name())
+            except (errors.PathError, errors.TransportError), e:
+                # TODO: Should these be warnings or mutters?
+                mutter("couldn't rename obsolete pack, skipping it:\n%s"
+                       % (e,))
             # TODO: Probably needs to know all possible indices for this pack
             # - or maybe list the directory and move all indices matching this
             # name whether we recognize it or not?
@@ -1793,8 +1803,12 @@
             if self.chk_index is not None:
                 suffixes.append('.cix')
             for suffix in suffixes:
-                self._index_transport.rename(pack.name + suffix,
-                    '../obsolete_packs/' + pack.name + suffix)
+                try:
+                    self._index_transport.rename(pack.name + suffix,
+                        '../obsolete_packs/' + pack.name + suffix)
+                except (errors.PathError, errors.TransportError), e:
+                    mutter("couldn't rename obsolete index, skipping it:\n%s"
+                           % (e,))
 
     def pack_distribution(self, total_revisions):
         """Generate a list of the number of revisions to put in each pack.
@@ -1826,14 +1840,22 @@
         self._remove_pack_indices(pack)
         self.packs.remove(pack)
 
-    def _remove_pack_indices(self, pack):
-        """Remove the indices for pack from the aggregated indices."""
-        self.revision_index.remove_index(pack.revision_index, pack)
-        self.inventory_index.remove_index(pack.inventory_index, pack)
-        self.text_index.remove_index(pack.text_index, pack)
-        self.signature_index.remove_index(pack.signature_index, pack)
-        if self.chk_index is not None:
-            self.chk_index.remove_index(pack.chk_index, pack)
+    def _remove_pack_indices(self, pack, ignore_missing=False):
+        """Remove the indices for pack from the aggregated indices.
+
+        :param ignore_missing: Suppress KeyErrors from calling remove_index.
+        """
+        for index_type in Pack.index_definitions.keys():
+            attr_name = index_type + '_index'
+            aggregate_index = getattr(self, attr_name)
+            if aggregate_index is not None:
+                pack_index = getattr(pack, attr_name)
+                try:
+                    aggregate_index.remove_index(pack_index)
+                except KeyError:
+                    if ignore_missing:
+                        continue
+                    raise
 
     def reset(self):
         """Clear all cached data."""
@@ -1872,6 +1894,7 @@
         disk_nodes = set()
         for index, key, value in self._iter_disk_pack_index():
             disk_nodes.add((key, value))
+        orig_disk_nodes = set(disk_nodes)
 
         # do a two-way diff against our original content
         current_nodes = set()
@@ -1890,7 +1913,7 @@
         disk_nodes.difference_update(deleted_nodes)
         disk_nodes.update(new_nodes)
 
-        return disk_nodes, deleted_nodes, new_nodes
+        return disk_nodes, deleted_nodes, new_nodes, orig_disk_nodes
 
     def _syncronize_pack_names_from_disk_nodes(self, disk_nodes):
        """Given the correct set of pack files, update our saved info.
@@ -1936,7 +1959,7 @@
                 added.append(name)
         return removed, added, modified
 
-    def _save_pack_names(self, clear_obsolete_packs=False):
+    def _save_pack_names(self, clear_obsolete_packs=False, obsolete_packs=None):
         """Save the list of packs.
 
         This will take out the mutex around the pack names list for the
@@ -1946,12 +1969,16 @@
 
         :param clear_obsolete_packs: If True, clear out the contents of the
             obsolete_packs directory.
+        :param obsolete_packs: Packs that are obsolete once the new pack-names
+            file has been written.
         :return: A list of the names saved that were not previously on disk.
         """
+        already_obsolete = []
         self.lock_names()
         try:
             builder = self._index_builder_class()
-            disk_nodes, deleted_nodes, new_nodes = self._diff_pack_names()
+            (disk_nodes, deleted_nodes, new_nodes,
+             orig_disk_nodes) = self._diff_pack_names()
             # TODO: handle same-name, index-size-changes here -
             # e.g. use the value from disk, not ours, *unless* we're the one
             # changing it.
@@ -1959,14 +1986,25 @@
                 builder.add_node(key, value)
             self.transport.put_file('pack-names', builder.finish(),
                 mode=self.repo.bzrdir._get_file_mode())
-            # move the baseline forward
             self._packs_at_load = disk_nodes
             if clear_obsolete_packs:
-                self._clear_obsolete_packs()
+                to_preserve = None
+                if obsolete_packs:
+                    to_preserve = set([o.name for o in obsolete_packs])
+                already_obsolete = self._clear_obsolete_packs(to_preserve)
         finally:
             self._unlock_names()
         # synchronise the memory packs list with what we just wrote:
         self._syncronize_pack_names_from_disk_nodes(disk_nodes)
+        if obsolete_packs:
+            # TODO: We could add one more condition here. "if o.name not in
+            #       orig_disk_nodes and o != the new_pack we haven't written to
+            #       disk yet. However, the new pack object is not easily
+            #       accessible here (it would have to be passed through the
+            #       autopacking code, etc.)
+            obsolete_packs = [o for o in obsolete_packs
+                              if o.name not in already_obsolete]
+            self._obsolete_packs(obsolete_packs)
         return [new_node[0][0] for new_node in new_nodes]
 
     def reload_pack_names(self):
@@ -1987,13 +2025,12 @@
         if first_read:
             return True
         # out the new value.
-        disk_nodes, deleted_nodes, new_nodes = self._diff_pack_names()
+        (disk_nodes, deleted_nodes, new_nodes,
+         orig_disk_nodes) = self._diff_pack_names()
         # _packs_at_load is meant to be the explicit list of names in
         # 'pack-names' at then start. As such, it should not contain any
         # pending names that haven't been written out yet.
-        pack_names_nodes = disk_nodes.difference(new_nodes)
-        pack_names_nodes.update(deleted_nodes)
-        self._packs_at_load = pack_names_nodes
+        self._packs_at_load = orig_disk_nodes
         (removed, added,
          modified) = self._syncronize_pack_names_from_disk_nodes(disk_nodes)
         if removed or added or modified:
@@ -2008,15 +2045,28 @@
             raise
         raise errors.RetryAutopack(self.repo, False, sys.exc_info())
 
-    def _clear_obsolete_packs(self):
+    def _clear_obsolete_packs(self, preserve=None):
         """Delete everything from the obsolete-packs directory.
+
+        :return: A list of pack identifiers (the filename without '.pack') that
+            were found in obsolete_packs.
         """
+        found = []
         obsolete_pack_transport = self.transport.clone('obsolete_packs')
+        if preserve is None:
+            preserve = set()
         for filename in obsolete_pack_transport.list_dir('.'):
+            name, ext = osutils.splitext(filename)
+            if ext == '.pack':
+                found.append(name)
+            if name in preserve:
+                continue
             try:
                 obsolete_pack_transport.delete(filename)
             except (errors.PathError, errors.TransportError), e:
-                warning("couldn't delete obsolete pack, skipping it:\n%s" % (e,))
+                warning("couldn't delete obsolete pack, skipping it:\n%s"
+                        % (e,))
+        return found
 
     def _start_write_group(self):
         # Do not permit preparation for writing if we're not in a 'write lock'.
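A self-contained model of the new preserve/found behaviour of _clear_obsolete_packs above, using os.path.splitext as a stand-in for osutils.splitext (an assumption; the real method also deletes through a transport rather than returning its deletions):

    from os.path import splitext

    def clear_obsolete(listing, preserve=None):
        # Toy: returns (deleted, found) instead of deleting files.
        preserve = preserve or set()
        deleted, found = [], []
        for filename in listing:
            name, ext = splitext(filename)
            if ext == '.pack':
                found.append(name)
            if name in preserve:
                continue
            deleted.append(filename)
        return deleted, found

    deleted, found = clear_obsolete(
        ['old.pack', 'old.rix', 'new.pack'], preserve=set(['new']))
    assert found == ['old', 'new']       # every '.pack' name seen
    assert 'new.pack' not in deleted     # preserved packs stay on disk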
@@ -2049,24 +2099,21 @@
         # FIXME: just drop the transient index.
         # forget what names there are
         if self._new_pack is not None:
-            try:
-                self._new_pack.abort()
-            finally:
-                # XXX: If we aborted while in the middle of finishing the write
-                # group, _remove_pack_indices can fail because the indexes are
-                # already gone.  If they're not there we shouldn't fail in this
-                # case.  -- mbp 20081113
-                self._remove_pack_indices(self._new_pack)
-                self._new_pack = None
+            operation = cleanup.OperationWithCleanups(self._new_pack.abort)
+            operation.add_cleanup(setattr, self, '_new_pack', None)
+            # If we aborted while in the middle of finishing the write
+            # group, _remove_pack_indices could fail because the indexes are
+            # already gone.  But they're not there we shouldn't fail in this
+            # case, so we pass ignore_missing=True.
+            operation.add_cleanup(self._remove_pack_indices, self._new_pack,
+                ignore_missing=True)
+            operation.run_simple()
         for resumed_pack in self._resumed_packs:
-            try:
-                resumed_pack.abort()
-            finally:
-                # See comment in previous finally block.
-                try:
-                    self._remove_pack_indices(resumed_pack)
-                except KeyError:
-                    pass
+            operation = cleanup.OperationWithCleanups(resumed_pack.abort)
+            # See comment in previous finally block.
+            operation.add_cleanup(self._remove_pack_indices, resumed_pack,
+                ignore_missing=True)
+            operation.run_simple()
         del self._resumed_packs[:]
 
     def _remove_resumed_pack_indices(self):
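The abort path above now leans on bzrlib.cleanup instead of nested try/finally blocks. A minimal sketch of the contract the new code relies on (the real OperationWithCleanups is richer; treat this as an assumption-laden model, not bzrlib's implementation):

    class Operation(object):
        # Toy model: run a function, then always run the registered
        # cleanups, without letting a failing cleanup mask the original
        # exception.
        def __init__(self, func):
            self.func = func
            self.cleanups = []

        def add_cleanup(self, func, *args, **kwargs):
            # Arguments are captured eagerly, at registration time.
            self.cleanups.append((func, args, kwargs))

        def run_simple(self, *args):
            try:
                return self.func(*args)
            finally:
                for func, cargs, ckwargs in self.cleanups:
                    try:
                        func(*cargs, **ckwargs)
                    except Exception:
                        pass  # the real code logs suppressed errors

Note that add_cleanup captures its arguments eagerly: in the hunk above, _remove_pack_indices is bound to the current self._new_pack when the cleanup is registered, so it still sees the pack even after the setattr cleanup resets self._new_pack to None.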
@@ -2238,16 +2285,10 @@
         self._reconcile_fixes_text_parents = True
         self._reconcile_backsup_inventory = False
 
-    def _warn_if_deprecated(self):
+    def _warn_if_deprecated(self, branch=None):
         # This class isn't deprecated, but one sub-format is
         if isinstance(self._format, RepositoryFormatKnitPack5RichRootBroken):
-            from bzrlib import repository
-            if repository._deprecation_warning_done:
-                return
-            repository._deprecation_warning_done = True
-            warning("Format %s for %s is deprecated - please use"
-                    " 'bzr upgrade --1.6.1-rich-root'"
-                    % (self._format, self.bzrdir.transport.base))
+            super(KnitPackRepository, self)._warn_if_deprecated(branch)
 
     def _abort_write_group(self):
         self.revisions._index._key_dependencies.clear()
@@ -2311,6 +2352,9 @@
         if self._write_lock_count == 1:
             self._transaction = transactions.WriteTransaction()
         if not locked:
+            if 'relock' in debug.debug_flags and self._prev_lock == 'w':
+                note('%r was write locked again', self)
+            self._prev_lock = 'w'
             for repo in self._fallback_repositories:
                 # Writes don't affect fallback repos
                 repo.lock_read()
@@ -2323,6 +2367,9 @@
         else:
             self.control_files.lock_read()
         if not locked:
+            if 'relock' in debug.debug_flags and self._prev_lock == 'r':
+                note('%r was read locked again', self)
+            self._prev_lock = 'r'
             for repo in self._fallback_repositories:
                 repo.lock_read()
             self._refresh_data()
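Both lock paths above gate their new tracing on the 'relock' debug flag. debug.debug_flags is a plain set, so one way to turn the tracing on from Python (the command-line spelling -Drelock is assumed to be the registered name in this bzr version):

    from bzrlib import debug
    debug.debug_flags.add('relock')   # equivalent of passing -Drelock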
@@ -2356,6 +2403,7 @@
         packer = ReconcilePacker(collection, packs, extension, revs)
         return packer.pack(pb)
 
+    @only_raises(errors.LockNotHeld, errors.LockBroken)
     def unlock(self):
         if self._write_lock_count == 1 and self._write_group is not None:
             self.abort_write_group()
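The new @only_raises(errors.LockNotHeld, errors.LockBroken) guard declares that unlock() may only propagate those two errors. A toy model of what such a decorator must do, not bzrlib's actual implementation:

    def only_raises(*allowed):
        def decorator(func):
            def wrapped(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except allowed:
                    raise
                except Exception:
                    pass  # the real decorator logs the suppressed error
            return wrapped
        return decorator

    @only_raises(KeyError)
    def demo():
        raise ValueError('swallowed, not propagated')

    demo()   # no exception escapes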