mercurial/revlog.py
branch:    stable
changeset: 50316:87f0155d68aa
parent:    49828:9854a9adc466
child:     50348:f952be90b051
comparison: 50315:cf6e1d535602 to 50316:87f0155d68aa
@@ -300,10 +300,11 @@
         censorable=False,
         upperboundcomp=None,
         persistentnodemap=False,
         concurrencychecker=None,
         trypending=False,
+        try_split=False,
         canonical_parent_order=True,
     ):
         """
         create a revlog object
 
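Note: the new `try_split` keyword defaults to False, so existing callers are unaffected; the flag is stored on the instance in the next hunk and consulted when the index entry point is resolved (the `.i.s` lookup further down).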
@@ -326,10 +327,11 @@
         self._datafile = None
         self._sidedatafile = None
         self._nodemap_file = None
         self.postfix = postfix
         self._trypending = trypending
+        self._try_split = try_split
         self.opener = opener
         if persistentnodemap:
             self._nodemap_file = nodemaputil.get_nodemap_file(self)
 
         assert target[0] in ALL_KINDS
@@ -509,10 +511,12 @@
 
         if self.postfix is not None:
             entry_point = b'%s.i.%s' % (self.radix, self.postfix)
         elif self._trypending and self.opener.exists(b'%s.i.a' % self.radix):
            entry_point = b'%s.i.a' % self.radix
+        elif self._try_split and self.opener.exists(b'%s.i.s' % self.radix):
+            entry_point = b'%s.i.s' % self.radix
         else:
             entry_point = b'%s.i' % self.radix
 
         if docket is not None:
             self._docket = docket
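The resolution order after this hunk is: an explicit postfix, then a pending `.i.a` index, then (new here) a split `.i.s` index, and finally the plain `.i` file. A minimal standalone sketch of that order, with a hypothetical pick_entry_point helper and an `exists` callback standing in for self.opener.exists:

def pick_entry_point(radix, postfix, trypending, try_split, exists):
    """Mirror the branching above; `exists` stands in for opener.exists."""
    if postfix is not None:
        return b'%s.i.%s' % (radix, postfix)
    if trypending and exists(b'%s.i.a' % radix):
        return b'%s.i.a' % radix
    if try_split and exists(b'%s.i.s' % radix):
        return b'%s.i.s' % radix
    return b'%s.i' % radix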
@@ -2013,11 +2017,11 @@
         except error.RevlogError:
             if self._censorable and storageutil.iscensoredtext(text):
                 raise error.CensoredNodeError(self.display_id, node, text)
             raise
 
-    def _enforceinlinesize(self, tr):
+    def _enforceinlinesize(self, tr, side_write=True):
         """Check if the revlog is too big for inline and convert if so.
 
         This should be called after revisions are added to the revlog. If the
         revlog has grown too large to be an inline revlog, it will convert it
         to use multiple index and data files.
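Note: the new `side_write` argument (default True) selects how the rewritten index is installed: when set, the index is written to a `.i.s` side file and only renamed over the real index once the transaction is finalized; when unset, the temporary file from __index_new_fp replaces the index as soon as its context manager exits, as the updated comment at the end of this change notes.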
@@ -2030,11 +2034,12 @@
         troffset = tr.findoffset(self._indexfile)
         if troffset is None:
             raise error.RevlogError(
                 _(b"%s not found in the transaction") % self._indexfile
             )
-        trindex = None
+        if troffset:
+            tr.addbackup(self._indexfile, for_offset=True)
         tr.add(self._datafile, 0)
 
         existing_handles = False
         if self._writinghandles is not None:
             existing_handles = True
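Note: the removed `trindex` bookkeeping (seeded here and computed in the copy loop below) existed so tr.replace() could record truncation offsets for rollback; the new code instead registers a backup of the index with the transaction when it already tracks an offset for it, so an abort presumably restores the index from that backup rather than relying on truncation.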
@@ -2046,28 +2051,44 @@
             self._writinghandles = None
             self._segmentfile.writing_handle = None
             # No need to deal with sidedata writing handle as it is only
             # relevant with revlog-v2 which is never inline, not reaching
             # this code
+        if side_write:
+            old_index_file_path = self._indexfile
+            new_index_file_path = self._indexfile + b'.s'
+            opener = self.opener
+
+            fncache = getattr(opener, 'fncache', None)
+            if fncache is not None:
+                fncache.addignore(new_index_file_path)
+
+            # the "split" index replace the real index when the transaction is finalized
+            def finalize_callback(tr):
+                opener.rename(
+                    new_index_file_path,
+                    old_index_file_path,
+                    checkambig=True,
+                )
+
+            tr.registertmp(new_index_file_path)
+            if self.target[1] is not None:
+                finalize_id = b'000-revlog-split-%d-%s' % self.target
+            else:
+                finalize_id = b'000-revlog-split-%d' % self.target[0]
+            tr.addfinalize(finalize_id, finalize_callback)
 
         new_dfh = self._datafp(b'w+')
         new_dfh.truncate(0)  # drop any potentially existing data
         try:
             with self._indexfp() as read_ifh:
                 for r in self:
                     new_dfh.write(self._getsegmentforrevs(r, r, df=read_ifh)[1])
-                    if (
-                        trindex is None
-                        and troffset
-                        <= self.start(r) + r * self.index.entry_size
-                    ):
-                        trindex = r
                 new_dfh.flush()
 
-            if trindex is None:
-                trindex = 0
-
+            if side_write:
+                self._indexfile = new_index_file_path
             with self.__index_new_fp() as fp:
                 self._format_flags &= ~FLAG_INLINE_DATA
                 self._inline = False
                 for i in self:
                     e = self.index.entry_binary(i)
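The `side_write` branch above writes the new index to a sibling `<index>.s` file, registers that file as temporary with the transaction, and installs a finalize callback that renames it over the real index only once the transaction succeeds. A rough, self-contained sketch of that pattern, using a toy transaction object and os.replace rather than Mercurial's transaction API:

import os


class TinyTransaction:
    """Toy stand-in: finalizers run on success, temp files are dropped on abort."""

    def __init__(self):
        self._finalizers = []
        self._tmp_files = []

    def addfinalize(self, category, callback):
        self._finalizers.append((category, callback))

    def registertmp(self, path):
        self._tmp_files.append(path)

    def close(self):
        # success path: run finalizers in category order
        for _, callback in sorted(self._finalizers, key=lambda item: item[0]):
            callback(self)

    def abort(self):
        # failure path: the half-written side file is simply discarded
        for path in self._tmp_files:
            if os.path.exists(path):
                os.unlink(path)


def split_index(tr, index_path, new_index_bytes):
    """Write the replacement index next to the live one and promote it later."""
    side_path = index_path + '.s'
    with open(side_path, 'wb') as fp:
        fp.write(new_index_bytes)
    tr.registertmp(side_path)
    # the live index is only replaced once the whole transaction succeeds
    tr.addfinalize('revlog-split', lambda tr: os.replace(side_path, index_path))

Running finalizers in sorted category order is presumably also why the real code prefixes its finalize id with b'000-': it wants the rename to happen early among the transaction's finalize callbacks.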
@@ -2077,20 +2098,13 @@
                         e = header + e
                     fp.write(e)
                 if self._docket is not None:
                     self._docket.index_end = fp.tell()
 
-                # There is a small transactional race here. If the rename of
-                # the index fails, we should remove the datafile. It is more
-                # important to ensure that the data file is not truncated
-                # when the index is replaced as otherwise data is lost.
-                tr.replace(self._datafile, self.start(trindex))
-
-                # the temp file replace the real index when we exit the context
-                # manager
-
-            tr.replace(self._indexfile, trindex * self.index.entry_size)
+                # If we don't use side-write, the temp file replace the real
+                # index when we exit the context manager
+
             nodemaputil.setup_persistent_nodemap(tr, self)
             self._segmentfile = randomaccessfile.randomaccessfile(
                 self.opener,
                 self._datafile,
                 self._chunkcachesize,