comparison mercurial/context.py @ 43077:687b865b95ad

formatting: byteify all mercurial/ and hgext/ string literals Done with python3.7 contrib/byteify-strings.py -i $(hg files 'set:mercurial/**.py - mercurial/thirdparty/** + hgext/**.py - hgext/fsmonitor/pywatchman/** - mercurial/__init__.py') black -l 80 -t py33 -S $(hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**" - hgext/fsmonitor/pywatchman/**') # skip-blame mass-reformatting only Differential Revision: https://phab.mercurial-scm.org/D6972
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:48:39 -0400
parents 2372284d9457
children eef9a2d67051
comparison
equal deleted inserted replaced
43076:2372284d9457 43077:687b865b95ad
199 def matchfileset(self, expr, badfn=None): 199 def matchfileset(self, expr, badfn=None):
200 return fileset.match(self, expr, badfn=badfn) 200 return fileset.match(self, expr, badfn=badfn)
201 201
202 def obsolete(self): 202 def obsolete(self):
203 """True if the changeset is obsolete""" 203 """True if the changeset is obsolete"""
204 return self.rev() in obsmod.getrevs(self._repo, 'obsolete') 204 return self.rev() in obsmod.getrevs(self._repo, b'obsolete')
205 205
206 def extinct(self): 206 def extinct(self):
207 """True if the changeset is extinct""" 207 """True if the changeset is extinct"""
208 return self.rev() in obsmod.getrevs(self._repo, 'extinct') 208 return self.rev() in obsmod.getrevs(self._repo, b'extinct')
209 209
210 def orphan(self): 210 def orphan(self):
211 """True if the changeset is not obsolete, but its ancestor is""" 211 """True if the changeset is not obsolete, but its ancestor is"""
212 return self.rev() in obsmod.getrevs(self._repo, 'orphan') 212 return self.rev() in obsmod.getrevs(self._repo, b'orphan')
213 213
214 def phasedivergent(self): 214 def phasedivergent(self):
215 """True if the changeset tries to be a successor of a public changeset 215 """True if the changeset tries to be a successor of a public changeset
216 216
217 Only non-public and non-obsolete changesets may be phase-divergent. 217 Only non-public and non-obsolete changesets may be phase-divergent.
218 """ 218 """
219 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent') 219 return self.rev() in obsmod.getrevs(self._repo, b'phasedivergent')
220 220
221 def contentdivergent(self): 221 def contentdivergent(self):
222 """Is a successor of a changeset with multiple possible successor sets 222 """Is a successor of a changeset with multiple possible successor sets
223 223
224 Only non-public and non-obsolete changesets may be content-divergent. 224 Only non-public and non-obsolete changesets may be content-divergent.
225 """ 225 """
226 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent') 226 return self.rev() in obsmod.getrevs(self._repo, b'contentdivergent')
227 227
228 def isunstable(self): 228 def isunstable(self):
229 """True if the changeset is either orphan, phase-divergent or 229 """True if the changeset is either orphan, phase-divergent or
230 content-divergent""" 230 content-divergent"""
231 return self.orphan() or self.phasedivergent() or self.contentdivergent() 231 return self.orphan() or self.phasedivergent() or self.contentdivergent()
238 - phase-divergent, 238 - phase-divergent,
239 - content-divergent. 239 - content-divergent.
240 """ 240 """
241 instabilities = [] 241 instabilities = []
242 if self.orphan(): 242 if self.orphan():
243 instabilities.append('orphan') 243 instabilities.append(b'orphan')
244 if self.phasedivergent(): 244 if self.phasedivergent():
245 instabilities.append('phase-divergent') 245 instabilities.append(b'phase-divergent')
246 if self.contentdivergent(): 246 if self.contentdivergent():
247 instabilities.append('content-divergent') 247 instabilities.append(b'content-divergent')
248 return instabilities 248 return instabilities
249 249
250 def parents(self): 250 def parents(self):
251 """return contexts for each parent changeset""" 251 """return contexts for each parent changeset"""
252 return self._parents 252 return self._parents
264 if r'_manifest' in self.__dict__: 264 if r'_manifest' in self.__dict__:
265 try: 265 try:
266 return self._manifest[path], self._manifest.flags(path) 266 return self._manifest[path], self._manifest.flags(path)
267 except KeyError: 267 except KeyError:
268 raise error.ManifestLookupError( 268 raise error.ManifestLookupError(
269 self._node, path, _('not found in manifest') 269 self._node, path, _(b'not found in manifest')
270 ) 270 )
271 if r'_manifestdelta' in self.__dict__ or path in self.files(): 271 if r'_manifestdelta' in self.__dict__ or path in self.files():
272 if path in self._manifestdelta: 272 if path in self._manifestdelta:
273 return ( 273 return (
274 self._manifestdelta[path], 274 self._manifestdelta[path],
277 mfl = self._repo.manifestlog 277 mfl = self._repo.manifestlog
278 try: 278 try:
279 node, flag = mfl[self._changeset.manifest].find(path) 279 node, flag = mfl[self._changeset.manifest].find(path)
280 except KeyError: 280 except KeyError:
281 raise error.ManifestLookupError( 281 raise error.ManifestLookupError(
282 self._node, path, _('not found in manifest') 282 self._node, path, _(b'not found in manifest')
283 ) 283 )
284 284
285 return node, flag 285 return node, flag
286 286
287 def filenode(self, path): 287 def filenode(self, path):
289 289
290 def flags(self, path): 290 def flags(self, path):
291 try: 291 try:
292 return self._fileinfo(path)[1] 292 return self._fileinfo(path)[1]
293 except error.LookupError: 293 except error.LookupError:
294 return '' 294 return b''
295 295
296 @propertycache 296 @propertycache
297 def _copies(self): 297 def _copies(self):
298 return copies.computechangesetcopies(self) 298 return copies.computechangesetcopies(self)
299 299
319 def match( 319 def match(
320 self, 320 self,
321 pats=None, 321 pats=None,
322 include=None, 322 include=None,
323 exclude=None, 323 exclude=None,
324 default='glob', 324 default=b'glob',
325 listsubrepos=False, 325 listsubrepos=False,
326 badfn=None, 326 badfn=None,
327 ): 327 ):
328 r = self._repo 328 r = self._repo
329 return matchmod.match( 329 return matchmod.match(
444 clean=listclean, 444 clean=listclean,
445 unknown=listunknown, 445 unknown=listunknown,
446 listsubrepos=True, 446 listsubrepos=True,
447 ) 447 )
448 for rfiles, sfiles in zip(r, s): 448 for rfiles, sfiles in zip(r, s):
449 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles) 449 rfiles.extend(b"%s/%s" % (subpath, f) for f in sfiles)
450 450
451 for l in r: 451 for l in r:
452 l.sort() 452 l.sort()
453 453
454 return r 454 return r
527 modified.difference_update(self.filesadded()) 527 modified.difference_update(self.filesadded())
528 modified.difference_update(self.filesremoved()) 528 modified.difference_update(self.filesremoved())
529 return sorted(modified) 529 return sorted(modified)
530 530
531 def filesadded(self): 531 def filesadded(self):
532 source = self._repo.ui.config('experimental', 'copies.read-from') 532 source = self._repo.ui.config(b'experimental', b'copies.read-from')
533 filesadded = self._changeset.filesadded 533 filesadded = self._changeset.filesadded
534 if source == 'changeset-only': 534 if source == b'changeset-only':
535 if filesadded is None: 535 if filesadded is None:
536 filesadded = [] 536 filesadded = []
537 elif source == 'compatibility': 537 elif source == b'compatibility':
538 if filesadded is None: 538 if filesadded is None:
539 filesadded = scmutil.computechangesetfilesadded(self) 539 filesadded = scmutil.computechangesetfilesadded(self)
540 else: 540 else:
541 filesadded = scmutil.computechangesetfilesadded(self) 541 filesadded = scmutil.computechangesetfilesadded(self)
542 return filesadded 542 return filesadded
543 543
544 def filesremoved(self): 544 def filesremoved(self):
545 source = self._repo.ui.config('experimental', 'copies.read-from') 545 source = self._repo.ui.config(b'experimental', b'copies.read-from')
546 filesremoved = self._changeset.filesremoved 546 filesremoved = self._changeset.filesremoved
547 if source == 'changeset-only': 547 if source == b'changeset-only':
548 if filesremoved is None: 548 if filesremoved is None:
549 filesremoved = [] 549 filesremoved = []
550 elif source == 'compatibility': 550 elif source == b'compatibility':
551 if filesremoved is None: 551 if filesremoved is None:
552 filesremoved = scmutil.computechangesetfilesremoved(self) 552 filesremoved = scmutil.computechangesetfilesremoved(self)
553 else: 553 else:
554 filesremoved = scmutil.computechangesetfilesremoved(self) 554 filesremoved = scmutil.computechangesetfilesremoved(self)
555 return filesremoved 555 return filesremoved
556 556
557 @propertycache 557 @propertycache
558 def _copies(self): 558 def _copies(self):
559 source = self._repo.ui.config('experimental', 'copies.read-from') 559 source = self._repo.ui.config(b'experimental', b'copies.read-from')
560 p1copies = self._changeset.p1copies 560 p1copies = self._changeset.p1copies
561 p2copies = self._changeset.p2copies 561 p2copies = self._changeset.p2copies
562 # If config says to get copy metadata only from changeset, then return 562 # If config says to get copy metadata only from changeset, then return
563 # that, defaulting to {} if there was no copy metadata. 563 # that, defaulting to {} if there was no copy metadata.
564 # In compatibility mode, we return copy data from the changeset if 564 # In compatibility mode, we return copy data from the changeset if
565 # it was recorded there, and otherwise we fall back to getting it from 565 # it was recorded there, and otherwise we fall back to getting it from
566 # the filelogs (below). 566 # the filelogs (below).
567 if source == 'changeset-only': 567 if source == b'changeset-only':
568 if p1copies is None: 568 if p1copies is None:
569 p1copies = {} 569 p1copies = {}
570 if p2copies is None: 570 if p2copies is None:
571 p2copies = {} 571 p2copies = {}
572 elif source == 'compatibility': 572 elif source == b'compatibility':
573 if p1copies is None: 573 if p1copies is None:
574 # we are in compatiblity mode and there is not data in the 574 # we are in compatiblity mode and there is not data in the
575 # changeset), we get the copy metadata from the filelogs. 575 # changeset), we get the copy metadata from the filelogs.
576 p1copies, p2copies = super(changectx, self)._copies 576 p1copies, p2copies = super(changectx, self)._copies
577 else: 577 else:
582 582
583 def description(self): 583 def description(self):
584 return self._changeset.description 584 return self._changeset.description
585 585
586 def branch(self): 586 def branch(self):
587 return encoding.tolocal(self._changeset.extra.get("branch")) 587 return encoding.tolocal(self._changeset.extra.get(b"branch"))
588 588
589 def closesbranch(self): 589 def closesbranch(self):
590 return 'close' in self._changeset.extra 590 return b'close' in self._changeset.extra
591 591
592 def extra(self): 592 def extra(self):
593 """Return a dict of extra information.""" 593 """Return a dict of extra information."""
594 return self._changeset.extra 594 return self._changeset.extra
595 595
603 603
604 def phase(self): 604 def phase(self):
605 return self._repo._phasecache.phase(self._repo, self._rev) 605 return self._repo._phasecache.phase(self._repo, self._rev)
606 606
607 def hidden(self): 607 def hidden(self):
608 return self._rev in repoview.filterrevs(self._repo, 'visible') 608 return self._rev in repoview.filterrevs(self._repo, b'visible')
609 609
610 def isinmemory(self): 610 def isinmemory(self):
611 return False 611 return False
612 612
613 def children(self): 613 def children(self):
654 anc = nullid 654 anc = nullid
655 elif len(cahs) == 1: 655 elif len(cahs) == 1:
656 anc = cahs[0] 656 anc = cahs[0]
657 else: 657 else:
658 # experimental config: merge.preferancestor 658 # experimental config: merge.preferancestor
659 for r in self._repo.ui.configlist('merge', 'preferancestor'): 659 for r in self._repo.ui.configlist(b'merge', b'preferancestor'):
660 try: 660 try:
661 ctx = scmutil.revsymbol(self._repo, r) 661 ctx = scmutil.revsymbol(self._repo, r)
662 except error.RepoLookupError: 662 except error.RepoLookupError:
663 continue 663 continue
664 anc = ctx.node() 664 anc = ctx.node()
667 else: 667 else:
668 anc = self._repo.changelog.ancestor(self._node, n2) 668 anc = self._repo.changelog.ancestor(self._node, n2)
669 if warn: 669 if warn:
670 self._repo.ui.status( 670 self._repo.ui.status(
671 ( 671 (
672 _("note: using %s as ancestor of %s and %s\n") 672 _(b"note: using %s as ancestor of %s and %s\n")
673 % (short(anc), short(self._node), short(n2)) 673 % (short(anc), short(self._node), short(n2))
674 ) 674 )
675 + ''.join( 675 + b''.join(
676 _( 676 _(
677 " alternatively, use --config " 677 b" alternatively, use --config "
678 "merge.preferancestor=%s\n" 678 b"merge.preferancestor=%s\n"
679 ) 679 )
680 % short(n) 680 % short(n)
681 for n in sorted(cahs) 681 for n in sorted(cahs)
682 if n != anc 682 if n != anc
683 ) 683 )
693 693
694 # Wrap match.bad method to have message with nodeid 694 # Wrap match.bad method to have message with nodeid
695 def bad(fn, msg): 695 def bad(fn, msg):
696 # The manifest doesn't know about subrepos, so don't complain about 696 # The manifest doesn't know about subrepos, so don't complain about
697 # paths into valid subrepos. 697 # paths into valid subrepos.
698 if any(fn == s or fn.startswith(s + '/') for s in self.substate): 698 if any(fn == s or fn.startswith(s + b'/') for s in self.substate):
699 return 699 return
700 match.bad(fn, _('no such file in rev %s') % self) 700 match.bad(fn, _(b'no such file in rev %s') % self)
701 701
702 m = matchmod.badmatch(self._repo.narrowmatch(match), bad) 702 m = matchmod.badmatch(self._repo.narrowmatch(match), bad)
703 return self._manifest.walk(m) 703 return self._manifest.walk(m)
704 704
705 def matches(self, match): 705 def matches(self, match):
755 755
756 __bool__ = __nonzero__ 756 __bool__ = __nonzero__
757 757
758 def __bytes__(self): 758 def __bytes__(self):
759 try: 759 try:
760 return "%s@%s" % (self.path(), self._changectx) 760 return b"%s@%s" % (self.path(), self._changectx)
761 except error.LookupError: 761 except error.LookupError:
762 return "%s@???" % self.path() 762 return b"%s@???" % self.path()
763 763
764 __str__ = encoding.strmethod(__bytes__) 764 __str__ = encoding.strmethod(__bytes__)
765 765
766 def __repr__(self): 766 def __repr__(self):
767 return r"<%s %s>" % (type(self).__name__, str(self)) 767 return r"<%s %s>" % (type(self).__name__, str(self))
869 return stringutil.binary(self.data()) 869 return stringutil.binary(self.data())
870 except IOError: 870 except IOError:
871 return False 871 return False
872 872
873 def isexec(self): 873 def isexec(self):
874 return 'x' in self.flags() 874 return b'x' in self.flags()
875 875
876 def islink(self): 876 def islink(self):
877 return 'l' in self.flags() 877 return b'l' in self.flags()
878 878
879 def isabsent(self): 879 def isabsent(self):
880 """whether this filectx represents a file not in self._changectx 880 """whether this filectx represents a file not in self._changectx
881 881
882 This is mainly for merge code to detect change/delete conflicts. This is 882 This is mainly for merge code to detect change/delete conflicts. This is
893 if fctx._customcmp: 893 if fctx._customcmp:
894 return fctx.cmp(self) 894 return fctx.cmp(self)
895 895
896 if self._filenode is None: 896 if self._filenode is None:
897 raise error.ProgrammingError( 897 raise error.ProgrammingError(
898 'filectx.cmp() must be reimplemented if not backed by revlog' 898 b'filectx.cmp() must be reimplemented if not backed by revlog'
899 ) 899 )
900 900
901 if fctx._filenode is None: 901 if fctx._filenode is None:
902 if self._repo._encodefilterpats: 902 if self._repo._encodefilterpats:
903 # can't rely on size() because wdir content may be decoded 903 # can't rely on size() because wdir content may be decoded
1160 self._repo = repo 1160 self._repo = repo
1161 self._path = path 1161 self._path = path
1162 1162
1163 assert ( 1163 assert (
1164 changeid is not None or fileid is not None or changectx is not None 1164 changeid is not None or fileid is not None or changectx is not None
1165 ), "bad args: changeid=%r, fileid=%r, changectx=%r" % ( 1165 ), b"bad args: changeid=%r, fileid=%r, changectx=%r" % (
1166 changeid, 1166 changeid,
1167 fileid, 1167 fileid,
1168 changectx, 1168 changectx,
1169 ) 1169 )
1170 1170
1221 1221
1222 def data(self): 1222 def data(self):
1223 try: 1223 try:
1224 return self._filelog.read(self._filenode) 1224 return self._filelog.read(self._filenode)
1225 except error.CensoredNodeError: 1225 except error.CensoredNodeError:
1226 if self._repo.ui.config("censor", "policy") == "ignore": 1226 if self._repo.ui.config(b"censor", b"policy") == b"ignore":
1227 return "" 1227 return b""
1228 raise error.Abort( 1228 raise error.Abort(
1229 _("censored node: %s") % short(self._filenode), 1229 _(b"censored node: %s") % short(self._filenode),
1230 hint=_("set censor.policy to ignore errors"), 1230 hint=_(b"set censor.policy to ignore errors"),
1231 ) 1231 )
1232 1232
1233 def size(self): 1233 def size(self):
1234 return self._filelog.size(self._filerev) 1234 return self._filelog.size(self._filerev)
1235 1235
1273 wants the ability to commit, e.g. workingctx or memctx.""" 1273 wants the ability to commit, e.g. workingctx or memctx."""
1274 1274
1275 def __init__( 1275 def __init__(
1276 self, 1276 self,
1277 repo, 1277 repo,
1278 text="", 1278 text=b"",
1279 user=None, 1279 user=None,
1280 date=None, 1280 date=None,
1281 extra=None, 1281 extra=None,
1282 changes=None, 1282 changes=None,
1283 branch=None, 1283 branch=None,
1295 1295
1296 self._extra = {} 1296 self._extra = {}
1297 if extra: 1297 if extra:
1298 self._extra = extra.copy() 1298 self._extra = extra.copy()
1299 if branch is not None: 1299 if branch is not None:
1300 self._extra['branch'] = encoding.fromlocal(branch) 1300 self._extra[b'branch'] = encoding.fromlocal(branch)
1301 if not self._extra.get('branch'): 1301 if not self._extra.get(b'branch'):
1302 self._extra['branch'] = 'default' 1302 self._extra[b'branch'] = b'default'
1303 1303
1304 def __bytes__(self): 1304 def __bytes__(self):
1305 return bytes(self._parents[0]) + "+" 1305 return bytes(self._parents[0]) + b"+"
1306 1306
1307 __str__ = encoding.strmethod(__bytes__) 1307 __str__ = encoding.strmethod(__bytes__)
1308 1308
1309 def __nonzero__(self): 1309 def __nonzero__(self):
1310 return True 1310 return True
1320 return self._repo.ui.username() 1320 return self._repo.ui.username()
1321 1321
1322 @propertycache 1322 @propertycache
1323 def _date(self): 1323 def _date(self):
1324 ui = self._repo.ui 1324 ui = self._repo.ui
1325 date = ui.configdate('devel', 'default-date') 1325 date = ui.configdate(b'devel', b'default-date')
1326 if date is None: 1326 if date is None:
1327 date = dateutil.makedate() 1327 date = dateutil.makedate()
1328 return date 1328 return date
1329 1329
1330 def subrev(self, subpath): 1330 def subrev(self, subpath):
1362 filesmodified = modified 1362 filesmodified = modified
1363 filesadded = added 1363 filesadded = added
1364 filesremoved = removed 1364 filesremoved = removed
1365 1365
1366 def branch(self): 1366 def branch(self):
1367 return encoding.tolocal(self._extra['branch']) 1367 return encoding.tolocal(self._extra[b'branch'])
1368 1368
1369 def closesbranch(self): 1369 def closesbranch(self):
1370 return 'close' in self._extra 1370 return b'close' in self._extra
1371 1371
1372 def extra(self): 1372 def extra(self):
1373 return self._extra 1373 return self._extra
1374 1374
1375 def isinmemory(self): 1375 def isinmemory(self):
1431 changes - a list of file lists as returned by localrepo.status() 1431 changes - a list of file lists as returned by localrepo.status()
1432 or None to use the repository status. 1432 or None to use the repository status.
1433 """ 1433 """
1434 1434
1435 def __init__( 1435 def __init__(
1436 self, repo, text="", user=None, date=None, extra=None, changes=None 1436 self, repo, text=b"", user=None, date=None, extra=None, changes=None
1437 ): 1437 ):
1438 branch = None 1438 branch = None
1439 if not extra or 'branch' not in extra: 1439 if not extra or b'branch' not in extra:
1440 try: 1440 try:
1441 branch = repo.dirstate.branch() 1441 branch = repo.dirstate.branch()
1442 except UnicodeDecodeError: 1442 except UnicodeDecodeError:
1443 raise error.Abort(_('branch name not in UTF-8!')) 1443 raise error.Abort(_(b'branch name not in UTF-8!'))
1444 super(workingctx, self).__init__( 1444 super(workingctx, self).__init__(
1445 repo, text, user, date, extra, changes, branch=branch 1445 repo, text, user, date, extra, changes, branch=branch
1446 ) 1446 )
1447 1447
1448 def __iter__(self): 1448 def __iter__(self):
1449 d = self._repo.dirstate 1449 d = self._repo.dirstate
1450 for f in d: 1450 for f in d:
1451 if d[f] != 'r': 1451 if d[f] != b'r':
1452 yield f 1452 yield f
1453 1453
1454 def __contains__(self, key): 1454 def __contains__(self, key):
1455 return self._repo.dirstate[key] not in "?r" 1455 return self._repo.dirstate[key] not in b"?r"
1456 1456
1457 def hex(self): 1457 def hex(self):
1458 return wdirhex 1458 return wdirhex
1459 1459
1460 @propertycache 1460 @propertycache
1499 return fl1 1499 return fl1
1500 if fl1 == fla: 1500 if fl1 == fla:
1501 return fl2 1501 return fl2
1502 if fl2 == fla: 1502 if fl2 == fla:
1503 return fl1 1503 return fl1
1504 return '' # punt for conflicts 1504 return b'' # punt for conflicts
1505 1505
1506 return func 1506 return func
1507 1507
1508 @propertycache 1508 @propertycache
1509 def _flagfunc(self): 1509 def _flagfunc(self):
1512 def flags(self, path): 1512 def flags(self, path):
1513 if r'_manifest' in self.__dict__: 1513 if r'_manifest' in self.__dict__:
1514 try: 1514 try:
1515 return self._manifest.flags(path) 1515 return self._manifest.flags(path)
1516 except KeyError: 1516 except KeyError:
1517 return '' 1517 return b''
1518 1518
1519 try: 1519 try:
1520 return self._flagfunc(path) 1520 return self._flagfunc(path)
1521 except OSError: 1521 except OSError:
1522 return '' 1522 return b''
1523 1523
1524 def filectx(self, path, filelog=None): 1524 def filectx(self, path, filelog=None):
1525 """get a file context from the working directory""" 1525 """get a file context from the working directory"""
1526 return workingfilectx( 1526 return workingfilectx(
1527 self._repo, path, workingctx=self, filelog=filelog 1527 self._repo, path, workingctx=self, filelog=filelog
1528 ) 1528 )
1529 1529
1530 def dirty(self, missing=False, merge=True, branch=True): 1530 def dirty(self, missing=False, merge=True, branch=True):
1531 "check whether a working directory is modified" 1531 b"check whether a working directory is modified"
1532 # check subrepos first 1532 # check subrepos first
1533 for s in sorted(self.substate): 1533 for s in sorted(self.substate):
1534 if self.sub(s).dirty(missing=missing): 1534 if self.sub(s).dirty(missing=missing):
1535 return True 1535 return True
1536 # check current working dir 1536 # check current working dir
1541 or self.added() 1541 or self.added()
1542 or self.removed() 1542 or self.removed()
1543 or (missing and self.deleted()) 1543 or (missing and self.deleted())
1544 ) 1544 )
1545 1545
1546 def add(self, list, prefix=""): 1546 def add(self, list, prefix=b""):
1547 with self._repo.wlock(): 1547 with self._repo.wlock():
1548 ui, ds = self._repo.ui, self._repo.dirstate 1548 ui, ds = self._repo.ui, self._repo.dirstate
1549 uipath = lambda f: ds.pathto(pathutil.join(prefix, f)) 1549 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1550 rejected = [] 1550 rejected = []
1551 lstat = self._repo.wvfs.lstat 1551 lstat = self._repo.wvfs.lstat
1555 # Windows, since it contains the drive letter and colon. 1555 # Windows, since it contains the drive letter and colon.
1556 scmutil.checkportable(ui, os.path.join(prefix, f)) 1556 scmutil.checkportable(ui, os.path.join(prefix, f))
1557 try: 1557 try:
1558 st = lstat(f) 1558 st = lstat(f)
1559 except OSError: 1559 except OSError:
1560 ui.warn(_("%s does not exist!\n") % uipath(f)) 1560 ui.warn(_(b"%s does not exist!\n") % uipath(f))
1561 rejected.append(f) 1561 rejected.append(f)
1562 continue 1562 continue
1563 limit = ui.configbytes('ui', 'large-file-limit') 1563 limit = ui.configbytes(b'ui', b'large-file-limit')
1564 if limit != 0 and st.st_size > limit: 1564 if limit != 0 and st.st_size > limit:
1565 ui.warn( 1565 ui.warn(
1566 _( 1566 _(
1567 "%s: up to %d MB of RAM may be required " 1567 b"%s: up to %d MB of RAM may be required "
1568 "to manage this file\n" 1568 b"to manage this file\n"
1569 "(use 'hg revert %s' to cancel the " 1569 b"(use 'hg revert %s' to cancel the "
1570 "pending addition)\n" 1570 b"pending addition)\n"
1571 ) 1571 )
1572 % (f, 3 * st.st_size // 1000000, uipath(f)) 1572 % (f, 3 * st.st_size // 1000000, uipath(f))
1573 ) 1573 )
1574 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)): 1574 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1575 ui.warn( 1575 ui.warn(
1576 _( 1576 _(
1577 "%s not added: only files and symlinks " 1577 b"%s not added: only files and symlinks "
1578 "supported currently\n" 1578 b"supported currently\n"
1579 ) 1579 )
1580 % uipath(f) 1580 % uipath(f)
1581 ) 1581 )
1582 rejected.append(f) 1582 rejected.append(f)
1583 elif ds[f] in 'amn': 1583 elif ds[f] in b'amn':
1584 ui.warn(_("%s already tracked!\n") % uipath(f)) 1584 ui.warn(_(b"%s already tracked!\n") % uipath(f))
1585 elif ds[f] == 'r': 1585 elif ds[f] == b'r':
1586 ds.normallookup(f) 1586 ds.normallookup(f)
1587 else: 1587 else:
1588 ds.add(f) 1588 ds.add(f)
1589 return rejected 1589 return rejected
1590 1590
1591 def forget(self, files, prefix=""): 1591 def forget(self, files, prefix=b""):
1592 with self._repo.wlock(): 1592 with self._repo.wlock():
1593 ds = self._repo.dirstate 1593 ds = self._repo.dirstate
1594 uipath = lambda f: ds.pathto(pathutil.join(prefix, f)) 1594 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1595 rejected = [] 1595 rejected = []
1596 for f in files: 1596 for f in files:
1597 if f not in ds: 1597 if f not in ds:
1598 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f)) 1598 self._repo.ui.warn(_(b"%s not tracked!\n") % uipath(f))
1599 rejected.append(f) 1599 rejected.append(f)
1600 elif ds[f] != 'a': 1600 elif ds[f] != b'a':
1601 ds.remove(f) 1601 ds.remove(f)
1602 else: 1602 else:
1603 ds.drop(f) 1603 ds.drop(f)
1604 return rejected 1604 return rejected
1605 1605
1608 st = self._repo.wvfs.lstat(dest) 1608 st = self._repo.wvfs.lstat(dest)
1609 except OSError as err: 1609 except OSError as err:
1610 if err.errno != errno.ENOENT: 1610 if err.errno != errno.ENOENT:
1611 raise 1611 raise
1612 self._repo.ui.warn( 1612 self._repo.ui.warn(
1613 _("%s does not exist!\n") % self._repo.dirstate.pathto(dest) 1613 _(b"%s does not exist!\n") % self._repo.dirstate.pathto(dest)
1614 ) 1614 )
1615 return 1615 return
1616 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)): 1616 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1617 self._repo.ui.warn( 1617 self._repo.ui.warn(
1618 _("copy failed: %s is not a file or a " "symbolic link\n") 1618 _(b"copy failed: %s is not a file or a " b"symbolic link\n")
1619 % self._repo.dirstate.pathto(dest) 1619 % self._repo.dirstate.pathto(dest)
1620 ) 1620 )
1621 else: 1621 else:
1622 with self._repo.wlock(): 1622 with self._repo.wlock():
1623 ds = self._repo.dirstate 1623 ds = self._repo.dirstate
1624 if ds[dest] in '?': 1624 if ds[dest] in b'?':
1625 ds.add(dest) 1625 ds.add(dest)
1626 elif ds[dest] in 'r': 1626 elif ds[dest] in b'r':
1627 ds.normallookup(dest) 1627 ds.normallookup(dest)
1628 ds.copy(source, dest) 1628 ds.copy(source, dest)
1629 1629
1630 def match( 1630 def match(
1631 self, 1631 self,
1632 pats=None, 1632 pats=None,
1633 include=None, 1633 include=None,
1634 exclude=None, 1634 exclude=None,
1635 default='glob', 1635 default=b'glob',
1636 listsubrepos=False, 1636 listsubrepos=False,
1637 badfn=None, 1637 badfn=None,
1638 ): 1638 ):
1639 r = self._repo 1639 r = self._repo
1640 1640
1663 # via user error or dereferencing by NFS or Samba servers, 1663 # via user error or dereferencing by NFS or Samba servers,
1664 # so we filter out any placeholders that don't look like a 1664 # so we filter out any placeholders that don't look like a
1665 # symlink 1665 # symlink
1666 sane = [] 1666 sane = []
1667 for f in files: 1667 for f in files:
1668 if self.flags(f) == 'l': 1668 if self.flags(f) == b'l':
1669 d = self[f].data() 1669 d = self[f].data()
1670 if ( 1670 if (
1671 d == '' 1671 d == b''
1672 or len(d) >= 1024 1672 or len(d) >= 1024
1673 or '\n' in d 1673 or b'\n' in d
1674 or stringutil.binary(d) 1674 or stringutil.binary(d)
1675 ): 1675 ):
1676 self._repo.ui.debug( 1676 self._repo.ui.debug(
1677 'ignoring suspect symlink placeholder' ' "%s"\n' % f 1677 b'ignoring suspect symlink placeholder' b' "%s"\n' % f
1678 ) 1678 )
1679 continue 1679 continue
1680 sane.append(f) 1680 sane.append(f)
1681 return sane 1681 return sane
1682 1682
1744 # in this case, writing changes out breaks 1744 # in this case, writing changes out breaks
1745 # consistency, because .hg/dirstate was 1745 # consistency, because .hg/dirstate was
1746 # already changed simultaneously after last 1746 # already changed simultaneously after last
1747 # caching (see also issue5584 for detail) 1747 # caching (see also issue5584 for detail)
1748 self._repo.ui.debug( 1748 self._repo.ui.debug(
1749 'skip updating dirstate: ' 'identity mismatch\n' 1749 b'skip updating dirstate: ' b'identity mismatch\n'
1750 ) 1750 )
1751 except error.LockError: 1751 except error.LockError:
1752 pass 1752 pass
1753 finally: 1753 finally:
1754 # Even if the wlock couldn't be grabbed, clear out the list. 1754 # Even if the wlock couldn't be grabbed, clear out the list.
1755 self._repo.clearpostdsstatus() 1755 self._repo.clearpostdsstatus()
1756 1756
1757 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False): 1757 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1758 '''Gets the status from the dirstate -- internal use only.''' 1758 '''Gets the status from the dirstate -- internal use only.'''
1759 subrepos = [] 1759 subrepos = []
1760 if '.hgsub' in self: 1760 if b'.hgsub' in self:
1761 subrepos = sorted(self.substate) 1761 subrepos = sorted(self.substate)
1762 cmp, s = self._repo.dirstate.status( 1762 cmp, s = self._repo.dirstate.status(
1763 match, subrepos, ignored=ignored, clean=clean, unknown=unknown 1763 match, subrepos, ignored=ignored, clean=clean, unknown=unknown
1764 ) 1764 )
1765 1765
1853 s = self._dirstatestatus(match, listignored, listclean, listunknown) 1853 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1854 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems, 1854 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1855 # might have accidentally ended up with the entire contents of the file 1855 # might have accidentally ended up with the entire contents of the file
1856 # they are supposed to be linking to. 1856 # they are supposed to be linking to.
1857 s.modified[:] = self._filtersuspectsymlink(s.modified) 1857 s.modified[:] = self._filtersuspectsymlink(s.modified)
1858 if other != self._repo['.']: 1858 if other != self._repo[b'.']:
1859 s = super(workingctx, self)._buildstatus( 1859 s = super(workingctx, self)._buildstatus(
1860 other, s, match, listignored, listclean, listunknown 1860 other, s, match, listignored, listclean, listunknown
1861 ) 1861 )
1862 return s 1862 return s
1863 1863
1869 comparing against the parent changeset. 1869 comparing against the parent changeset.
1870 1870
1871 If we aren't comparing against the working directory's parent, then we 1871 If we aren't comparing against the working directory's parent, then we
1872 just use the default match object sent to us. 1872 just use the default match object sent to us.
1873 """ 1873 """
1874 if other != self._repo['.']: 1874 if other != self._repo[b'.']:
1875 1875
1876 def bad(f, msg): 1876 def bad(f, msg):
1877 # 'f' may be a directory pattern from 'match.files()', 1877 # 'f' may be a directory pattern from 'match.files()',
1878 # so 'f not in ctx1' is not enough 1878 # so 'f not in ctx1' is not enough
1879 if f not in other and not other.hasdir(f): 1879 if f not in other and not other.hasdir(f):
1880 self._repo.ui.warn( 1880 self._repo.ui.warn(
1881 '%s: %s\n' % (self._repo.dirstate.pathto(f), msg) 1881 b'%s: %s\n' % (self._repo.dirstate.pathto(f), msg)
1882 ) 1882 )
1883 1883
1884 match.bad = bad 1884 match.bad = bad
1885 return match 1885 return match
1886 1886
1896 ) 1896 )
1897 1897
def matches(self, match):
    """Return the sorted tracked files matching *match*.

    The matcher is narrowed to the repo's narrowspec first, and files
    whose dirstate entry is b'r' (scheduled for removal) are excluded.
    """
    narrowed = self._repo.narrowmatch(match)
    dirstate = self._repo.dirstate
    return sorted(
        path
        for path in dirstate.matches(narrowed)
        if dirstate[path] != b'r'
    )
1902 1902
1903 def markcommitted(self, node): 1903 def markcommitted(self, node):
1904 with self._repo.dirstate.parentchange(): 1904 with self._repo.dirstate.parentchange():
1905 for f in self.modified() + self.added(): 1905 for f in self.modified() + self.added():
1906 self._repo.dirstate.normal(f) 1906 self._repo.dirstate.normal(f)
2025 # invert comparison to reuse the same code path 2025 # invert comparison to reuse the same code path
2026 return fctx.cmp(self) 2026 return fctx.cmp(self)
2027 2027
def remove(self, ignoremissing=False):
    """wraps unlink for a repo's working directory"""
    # honour the config knob controlling pruning of empty parent dirs
    prune = self._repo.ui.configbool(b'experimental', b'removeemptydirs')
    self._repo.wvfs.unlinkpath(
        self._path, ignoremissing=ignoremissing, rmdir=prune
    )
2034 2034
2035 def write(self, data, flags, backgroundclose=False, **kwargs): 2035 def write(self, data, flags, backgroundclose=False, **kwargs):
2047 ``write()`` can be called successfully. 2047 ``write()`` can be called successfully.
2048 """ 2048 """
2049 wvfs = self._repo.wvfs 2049 wvfs = self._repo.wvfs
2050 f = self._path 2050 f = self._path
2051 wvfs.audit(f) 2051 wvfs.audit(f)
2052 if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'): 2052 if self._repo.ui.configbool(
2053 b'experimental', b'merge.checkpathconflicts'
2054 ):
2053 # remove files under the directory as they should already be 2055 # remove files under the directory as they should already be
2054 # warned and backed up 2056 # warned and backed up
2055 if wvfs.isdir(f) and not wvfs.islink(f): 2057 if wvfs.isdir(f) and not wvfs.islink(f):
2056 wvfs.rmtree(f, forcibly=True) 2058 wvfs.rmtree(f, forcibly=True)
2057 for p in reversed(list(util.finddirs(f))): 2059 for p in reversed(list(util.finddirs(f))):
2090 self._wrappedctx = wrappedctx 2092 self._wrappedctx = wrappedctx
2091 self._parents = [wrappedctx] 2093 self._parents = [wrappedctx]
2092 # Drop old manifest cache as it is now out of date. 2094 # Drop old manifest cache as it is now out of date.
2093 # This is necessary when, e.g., rebasing several nodes with one 2095 # This is necessary when, e.g., rebasing several nodes with one
2094 # ``overlayworkingctx`` (e.g. with --collapse). 2096 # ``overlayworkingctx`` (e.g. with --collapse).
2095 util.clearcachedproperty(self, '_manifest') 2097 util.clearcachedproperty(self, b'_manifest')
2096 2098
def data(self, path):
    """Return the file contents of *path*.

    Dirty files are served from the in-memory cache; a cached entry
    whose data is None (flags-only change) and clean files both fall
    back to the wrapped context.  Asking for a file that is cached as
    removed is a programming error.
    """
    if not self.isdirty(path):
        return self._wrappedctx[path].data()
    entry = self._cache[path]
    if not entry[b'exists']:
        raise error.ProgrammingError(
            b"No such file or directory: %s" % path
        )
    if entry[b'data'] is not None:
        return entry[b'data']
    # Must fallback here, too, because we only set flags.
    return self._wrappedctx[path].data()
2111 2113
2112 @propertycache 2114 @propertycache
2126 return man 2128 return man
2127 2129
@propertycache
def _flagfunc(self):
    # Flag-lookup callable used during manifest construction: returns
    # the b'flags' value (b'l'/b'x'/b'') recorded for *path* in the
    # overlay cache via _markdirty().  NOTE(review): assumes every
    # queried path is already dirty (present in _cache) — clean paths
    # would raise KeyError here.
    def f(path):
        return self._cache[path][b'flags']

    return f
2134 2136
def files(self):
    """Return the sorted list of files touched in this context."""
    touched = self.added() + self.modified() + self.removed()
    return sorted(touched)
2137 2139
def modified(self):
    """Files that are dirty here and already existed in the parent."""
    return [
        path
        for path in self._cache
        if self._cache[path][b'exists'] and self._existsinparent(path)
    ]
2144 2146
def added(self):
    """Files that are dirty here but did not exist in the parent."""
    return [
        path
        for path in self._cache
        if self._cache[path][b'exists'] and not self._existsinparent(path)
    ]
2151 2153
def removed(self):
    """Files that existed in the parent but are cached as removed."""
    return [
        path
        for path in self._cache
        if not self._cache[path][b'exists'] and self._existsinparent(path)
    ]
2158 2160
def p1copies(self):
    """Return the {dest: source} copy map relative to p1.

    Starts from the wrapped context's p1 copies and overlays the copy
    information recorded in this context's cache; a cached file without
    a copy source drops any inherited mapping for that file.
    """
    # Bug fix: the wrapped context lives on this object, not on the
    # repo (`self._repo` has no `_wrappedctx` attribute, so the old
    # code raised AttributeError).
    copies = self._wrappedctx.p1copies().copy()
    narrowmatch = self._repo.narrowmatch()
    for f in self._cache.keys():
        if not narrowmatch(f):
            continue
        copies.pop(f, None)  # delete if it exists
        source = self._cache[f][b'copied']
        if source:
            copies[f] = source
    return copies
2170 2172
2171 def p2copies(self): 2173 def p2copies(self):
2173 narrowmatch = self._repo.narrowmatch() 2175 narrowmatch = self._repo.narrowmatch()
2174 for f in self._cache.keys(): 2176 for f in self._cache.keys():
2175 if not narrowmatch(f): 2177 if not narrowmatch(f):
2176 continue 2178 continue
2177 copies.pop(f, None) # delete if it exists 2179 copies.pop(f, None) # delete if it exists
2178 source = self._cache[f]['copied'] 2180 source = self._cache[f][b'copied']
2179 if source: 2181 if source:
2180 copies[f] = source 2182 copies[f] = source
2181 return copies 2183 return copies
2182 2184
def isinmemory(self):
    """An overlay context lives purely in memory."""
    return True
2185 2187
def filedate(self, path):
    """Return the recorded date for *path* (cached when dirty)."""
    if not self.isdirty(path):
        return self._wrappedctx[path].date()
    return self._cache[path][b'date']
2191 2193
2192 def markcopied(self, path, origin): 2194 def markcopied(self, path, origin):
2193 self._markdirty( 2195 self._markdirty(
2198 copied=origin, 2200 copied=origin,
2199 ) 2201 )
2200 2202
def copydata(self, path):
    """Return the cached copy source for *path*, or None when clean."""
    if not self.isdirty(path):
        return None
    return self._cache[path][b'copied']
2206 2208
def flags(self, path):
    """Return the flags (b'l'/b'x'/b'') of *path*.

    Dirty files answer from the cache; a dirty entry marked as removed
    is a programming error; clean files defer to the wrapped context.
    """
    if self.isdirty(path):
        if self._cache[path][b'exists']:
            return self._cache[path][b'flags']
        # Bug fix: this class has no `_path` attribute; report the
        # requested path instead of crashing with AttributeError.
        raise error.ProgrammingError(
            b"No such file or directory: %s" % path
        )
    else:
        return self._wrappedctx[path].flags()
2217 2219
def __contains__(self, key):
    """Membership test: dirty entries answer from the overlay cache,
    everything else defers to the first parent."""
    entry = self._cache.get(key)
    if entry is not None:
        return entry[b'exists']
    return key in self.p1()
2222 2224
2223 def _existsinparent(self, path): 2225 def _existsinparent(self, path):
2224 try: 2226 try:
2225 # ``commitctx` raises a ``ManifestLookupError`` if a path does not 2227 # ``commitctx` raises a ``ManifestLookupError`` if a path does not
2239 """ 2241 """
2240 2242
2241 def fail(path, component): 2243 def fail(path, component):
2242 # p1() is the base and we're receiving "writes" for p2()'s 2244 # p1() is the base and we're receiving "writes" for p2()'s
2243 # files. 2245 # files.
2244 if 'l' in self.p1()[component].flags(): 2246 if b'l' in self.p1()[component].flags():
2245 raise error.Abort( 2247 raise error.Abort(
2246 "error: %s conflicts with symlink %s " 2248 b"error: %s conflicts with symlink %s "
2247 "in %d." % (path, component, self.p1().rev()) 2249 b"in %d." % (path, component, self.p1().rev())
2248 ) 2250 )
2249 else: 2251 else:
2250 raise error.Abort( 2252 raise error.Abort(
2251 "error: '%s' conflicts with file '%s' in " 2253 b"error: '%s' conflicts with file '%s' in "
2252 "%d." % (path, component, self.p1().rev()) 2254 b"%d." % (path, component, self.p1().rev())
2253 ) 2255 )
2254 2256
2255 # Test that each new directory to be created to write this path from p2 2257 # Test that each new directory to be created to write this path from p2
2256 # is not a file in p1. 2258 # is not a file in p1.
2257 components = path.split('/') 2259 components = path.split(b'/')
2258 for i in pycompat.xrange(len(components)): 2260 for i in pycompat.xrange(len(components)):
2259 component = "/".join(components[0:i]) 2261 component = b"/".join(components[0:i])
2260 if component in self: 2262 if component in self:
2261 fail(path, component) 2263 fail(path, component)
2262 2264
2263 # Test the other direction -- that this path from p2 isn't a directory 2265 # Test the other direction -- that this path from p2 isn't a directory
2264 # in p1 (test that p1 doesn't have any paths matching `path/*`). 2266 # in p1 (test that p1 doesn't have any paths matching `path/*`).
2271 # omit the files which are deleted in current IMM wctx 2273 # omit the files which are deleted in current IMM wctx
2272 mfiles = [m for m in mfiles if m in self] 2274 mfiles = [m for m in mfiles if m in self]
2273 if not mfiles: 2275 if not mfiles:
2274 return 2276 return
2275 raise error.Abort( 2277 raise error.Abort(
2276 "error: file '%s' cannot be written because " 2278 b"error: file '%s' cannot be written because "
2277 " '%s/' is a directory in %s (containing %d " 2279 b" '%s/' is a directory in %s (containing %d "
2278 "entries: %s)" 2280 b"entries: %s)"
2279 % (path, path, self.p1(), len(mfiles), ', '.join(mfiles)) 2281 % (path, path, self.p1(), len(mfiles), b', '.join(mfiles))
2280 ) 2282 )
2281 2283
def write(self, path, data, flags=b'', **kwargs):
    """Record new contents (and flags) for *path* in the overlay.

    ``data`` must not be None; a path-conflict audit runs before the
    cache entry is updated, and the entry is stamped with the current
    time.
    """
    if data is None:
        raise error.ProgrammingError(b"data must be non-None")
    self._auditconflicts(path)
    now = dateutil.makedate()
    self._markdirty(path, exists=True, data=data, date=now, flags=flags)
2289 2291
def setflags(self, path, l, x):
    """Mark *path* dirty with a symlink (``l``) or exec (``x``) flag.

    The symlink flag wins when both are requested, matching manifest
    flag semantics.
    """
    if l:
        newflag = b'l'
    elif x:
        newflag = b'x'
    else:
        newflag = b''
    self._markdirty(
        path, exists=True, date=dateutil.makedate(), flags=newflag
    )
2297 2299
def remove(self, path):
    """Mark *path* as deleted in the overlay cache."""
    self._markdirty(path, exists=False)
2300 2302
2304 """ 2306 """
2305 if self.isdirty(path): 2307 if self.isdirty(path):
2306 # If this path exists and is a symlink, "follow" it by calling 2308 # If this path exists and is a symlink, "follow" it by calling
2307 # exists on the destination path. 2309 # exists on the destination path.
2308 if ( 2310 if (
2309 self._cache[path]['exists'] 2311 self._cache[path][b'exists']
2310 and 'l' in self._cache[path]['flags'] 2312 and b'l' in self._cache[path][b'flags']
2311 ): 2313 ):
2312 return self.exists(self._cache[path]['data'].strip()) 2314 return self.exists(self._cache[path][b'data'].strip())
2313 else: 2315 else:
2314 return self._cache[path]['exists'] 2316 return self._cache[path][b'exists']
2315 2317
2316 return self._existsinparent(path) 2318 return self._existsinparent(path)
2317 2319
def lexists(self, path):
    """lexists returns True if the path exists"""
    # Unlike exists(), symlinks are not followed here.
    if not self.isdirty(path):
        return self._existsinparent(path)
    return self._cache[path][b'exists']
2324 2326
def size(self, path):
    """Return the length in bytes of *path*'s contents.

    Dirty files answer from the cache; a dirty entry marked as removed
    is a programming error; clean files defer to the wrapped context.
    """
    if self.isdirty(path):
        if self._cache[path][b'exists']:
            return len(self._cache[path][b'data'])
        # Bug fix: this class has no `_path` attribute; report the
        # requested path instead of crashing with AttributeError.
        raise error.ProgrammingError(
            b"No such file or directory: %s" % path
        )
    return self._wrappedctx[path].size()
2334 2336
2335 def tomemctx( 2337 def tomemctx(
2336 self, 2338 self,
2361 parents = (self._repo[parents[0]], self._repo[parents[1]]) 2363 parents = (self._repo[parents[0]], self._repo[parents[1]])
2362 2364
2363 files = self.files() 2365 files = self.files()
2364 2366
2365 def getfile(repo, memctx, path): 2367 def getfile(repo, memctx, path):
2366 if self._cache[path]['exists']: 2368 if self._cache[path][b'exists']:
2367 return memfilectx( 2369 return memfilectx(
2368 repo, 2370 repo,
2369 memctx, 2371 memctx,
2370 path, 2372 path,
2371 self._cache[path]['data'], 2373 self._cache[path][b'data'],
2372 'l' in self._cache[path]['flags'], 2374 b'l' in self._cache[path][b'flags'],
2373 'x' in self._cache[path]['flags'], 2375 b'x' in self._cache[path][b'flags'],
2374 self._cache[path]['copied'], 2376 self._cache[path][b'copied'],
2375 ) 2377 )
2376 else: 2378 else:
2377 # Returning None, but including the path in `files`, is 2379 # Returning None, but including the path in `files`, is
2378 # necessary for memctx to register a deletion. 2380 # necessary for memctx to register a deletion.
2379 return None 2381 return None
2422 for path in self._cache.keys(): 2424 for path in self._cache.keys():
2423 cache = self._cache[path] 2425 cache = self._cache[path]
2424 try: 2426 try:
2425 underlying = self._wrappedctx[path] 2427 underlying = self._wrappedctx[path]
2426 if ( 2428 if (
2427 underlying.data() == cache['data'] 2429 underlying.data() == cache[b'data']
2428 and underlying.flags() == cache['flags'] 2430 and underlying.flags() == cache[b'flags']
2429 ): 2431 ):
2430 keys.append(path) 2432 keys.append(path)
2431 except error.ManifestLookupError: 2433 except error.ManifestLookupError:
2432 # Path not in the underlying manifest (created). 2434 # Path not in the underlying manifest (created).
2433 continue 2435 continue
2435 for path in keys: 2437 for path in keys:
2436 del self._cache[path] 2438 del self._cache[path]
2437 return keys 2439 return keys
2438 2440
def _markdirty(
    self, path, exists, data=None, date=None, flags=b'', copied=None
):
    """Record *path* as dirty in the overlay cache.

    When the entry is marked as existing but no data is supplied,
    reuse previously cached data, or pull the contents from the
    wrapped context, so that an existing entry always carries data.
    """
    if exists and data is None:
        previous = self._cache.get(path) or {}
        data = previous.get(b'data')
        if data is None:
            data = self._wrappedctx[path].data()

    self._cache[path] = {
        b'exists': exists,
        b'data': data,
        b'date': date,
        b'flags': flags,
        b'copied': copied,
    }
2458 2460
2459 def filectx(self, path, filelog=None): 2461 def filectx(self, path, filelog=None):
2460 return overlayworkingfilectx( 2462 return overlayworkingfilectx(
2461 self._repo, path, parent=self, filelog=filelog 2463 self._repo, path, parent=self, filelog=filelog
2525 This hides changes in the working directory, if they aren't 2527 This hides changes in the working directory, if they aren't
2526 committed in this context. 2528 committed in this context.
2527 """ 2529 """
2528 2530
def __init__(
    self, repo, changes, text=b"", user=None, date=None, extra=None
):
    # Pass the precomputed status (``changes``) to the base class so
    # this committed context only reports those changes, hiding other
    # uncommitted working-directory modifications.
    super(workingcommitctx, self).__init__(
        repo, text, user, date, extra, changes
    )
2535 2537
2779 copied is the source file path if current file was copied in the 2781 copied is the source file path if current file was copied in the
2780 revision being committed, or None.""" 2782 revision being committed, or None."""
2781 super(memfilectx, self).__init__(repo, path, None, changectx) 2783 super(memfilectx, self).__init__(repo, path, None, changectx)
2782 self._data = data 2784 self._data = data
2783 if islink: 2785 if islink:
2784 self._flags = 'l' 2786 self._flags = b'l'
2785 elif isexec: 2787 elif isexec:
2786 self._flags = 'x' 2788 self._flags = b'x'
2787 else: 2789 else:
2788 self._flags = '' 2790 self._flags = b''
2789 self._copysource = copysource 2791 self._copysource = copysource
2790 2792
def copysource(self):
    """Return the path this file was copied from, or None."""
    return self._copysource
2793 2795
2928 self._path = path 2930 self._path = path
2929 2931
2930 def cmp(self, fctx): 2932 def cmp(self, fctx):
2931 # filecmp follows symlinks whereas `cmp` should not, so skip the fast 2933 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2932 # path if either side is a symlink. 2934 # path if either side is a symlink.
2933 symlinks = 'l' in self.flags() or 'l' in fctx.flags() 2935 symlinks = b'l' in self.flags() or b'l' in fctx.flags()
2934 if not symlinks and isinstance(fctx, workingfilectx) and self._repo: 2936 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2935 # Add a fast-path for merge if both sides are disk-backed. 2937 # Add a fast-path for merge if both sides are disk-backed.
2936 # Note that filecmp uses the opposite return values (True if same) 2938 # Note that filecmp uses the opposite return values (True if same)
2937 # from our cmp functions (True if different). 2939 # from our cmp functions (True if different).
2938 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path())) 2940 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2940 2942
def path(self):
    """Return the filesystem path backing this context."""
    return self._path
2943 2945
def flags(self):
    """Arbitrary on-disk files never report link/exec flags."""
    return b''
2946 2948
def data(self):
    """Read and return the raw contents of the backing file."""
    return util.readfile(self._path)
2949 2951
def decodeddata(self):
    """Return the raw file contents with no format decoding applied."""
    # Bug fix: open()'s mode argument must be str on Python 3 — the
    # byteify pass wrongly turned "rb" into b"rb" (TypeError).  The
    # path itself may stay bytes.
    with open(self._path, "rb") as f:
        return f.read()
2953 2955
def remove(self):
    """Delete the backing file from disk."""
    util.unlink(self._path)
2956 2958
def write(self, data, flags, **kwargs):
    """Overwrite the backing file with *data*.

    Flags are not supported for arbitrary files, so any truthy
    ``flags`` value is a caller error.
    """
    assert not flags
    # Bug fix: open()'s mode argument must be str on Python 3 — the
    # byteify pass wrongly turned "wb" into b"wb" (TypeError).
    with open(self._path, "wb") as f:
        f.write(data)