comparison mercurial/debugcommands.py @ 43077:687b865b95ad
formatting: byteify all mercurial/ and hgext/ string literals
Done with
python3.7 contrib/byteify-strings.py -i $(hg files 'set:mercurial/**.py - mercurial/thirdparty/** + hgext/**.py - hgext/fsmonitor/pywatchman/** - mercurial/__init__.py')
black -l 80 -t py33 -S $(hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**" - hgext/fsmonitor/pywatchman/**')
# skip-blame mass-reformatting only
Differential Revision: https://phab.mercurial-scm.org/D6972
author | Augie Fackler <augie@google.com> |
date | Sun, 06 Oct 2019 09:48:39 -0400 |
parents | 2372284d9457 |
children | 86e4daa2d54c |
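
For context, the byteification above is a mechanical, token-level rewrite: plain string literals gain a `b` prefix so they remain bytes literals under Python 3, and the follow-up `black` run only re-wraps the result (`-l 80` line length, `-t py33` target version, `-S` to skip string normalization). The snippet below is a minimal sketch of the prefixing idea using only the standard-library tokenize module; it is not the actual contrib/byteify-strings.py, which handles many cases (docstrings, already-prefixed literals, special call sites) that this illustration glosses over.

import io
import tokenize


def byteify(source):
    # Minimal sketch of the idea behind contrib/byteify-strings.py (NOT the
    # real script): prefix every un-prefixed (or raw) string literal with b''
    # so it stays a bytes literal on Python 3.  The real tool is token-based
    # too, but special-cases docstrings, certain call sites and more.
    out = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.STRING:
            body = tok.string.lstrip("rRbBuUfF")
            prefix = tok.string[: len(tok.string) - len(body)].lower()
            if not set(prefix) & set("buf"):  # skip b'', u'', f'' literals
                tok = tok._replace(string="b" + tok.string)
        out.append(tok)
    return tokenize.untokenize(out)


print(byteify("ui.write('%d:%s\\n' % (r.rev(a), hex(a)))\n"), end="")
# -> ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
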
43076:2372284d9457 | 43077:687b865b95ad |
---|---|
93 release = lockmod.release | 93 release = lockmod.release |
94 | 94 |
95 command = registrar.command() | 95 command = registrar.command() |
96 | 96 |
97 | 97 |
98 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True) | 98 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True) |
99 def debugancestor(ui, repo, *args): | 99 def debugancestor(ui, repo, *args): |
100 """find the ancestor revision of two revisions in a given index""" | 100 """find the ancestor revision of two revisions in a given index""" |
101 if len(args) == 3: | 101 if len(args) == 3: |
102 index, rev1, rev2 = args | 102 index, rev1, rev2 = args |
103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index) | 103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index) |
104 lookup = r.lookup | 104 lookup = r.lookup |
105 elif len(args) == 2: | 105 elif len(args) == 2: |
106 if not repo: | 106 if not repo: |
107 raise error.Abort( | 107 raise error.Abort( |
108 _('there is no Mercurial repository here ' '(.hg not found)') | 108 _(b'there is no Mercurial repository here ' b'(.hg not found)') |
109 ) | 109 ) |
110 rev1, rev2 = args | 110 rev1, rev2 = args |
111 r = repo.changelog | 111 r = repo.changelog |
112 lookup = repo.lookup | 112 lookup = repo.lookup |
113 else: | 113 else: |
114 raise error.Abort(_('either two or three arguments required')) | 114 raise error.Abort(_(b'either two or three arguments required')) |
115 a = r.ancestor(lookup(rev1), lookup(rev2)) | 115 a = r.ancestor(lookup(rev1), lookup(rev2)) |
116 ui.write('%d:%s\n' % (r.rev(a), hex(a))) | 116 ui.write(b'%d:%s\n' % (r.rev(a), hex(a))) |
117 | 117 |
118 | 118 |
119 @command('debugapplystreamclonebundle', [], 'FILE') | 119 @command(b'debugapplystreamclonebundle', [], b'FILE') |
120 def debugapplystreamclonebundle(ui, repo, fname): | 120 def debugapplystreamclonebundle(ui, repo, fname): |
121 """apply a stream clone bundle file""" | 121 """apply a stream clone bundle file""" |
122 f = hg.openpath(ui, fname) | 122 f = hg.openpath(ui, fname) |
123 gen = exchange.readbundle(ui, f, fname) | 123 gen = exchange.readbundle(ui, f, fname) |
124 gen.apply(repo) | 124 gen.apply(repo) |
125 | 125 |
126 | 126 |
127 @command( | 127 @command( |
128 'debugbuilddag', | 128 b'debugbuilddag', |
129 [ | 129 [ |
130 ('m', 'mergeable-file', None, _('add single file mergeable changes')), | |
131 ( | 130 ( |
132 'o', | 131 b'm', |
133 'overwritten-file', | 132 b'mergeable-file', |
134 None, | 133 None, |
135 _('add single file all revs overwrite'), | 134 _(b'add single file mergeable changes'), |
136 ), | 135 ), |
137 ('n', 'new-file', None, _('add new file at each rev')), | 136 ( |
| 137 b'o', |
| 138 b'overwritten-file', |
| 139 None, |
| 140 _(b'add single file all revs overwrite'), |
| 141 ), |
| 142 (b'n', b'new-file', None, _(b'add new file at each rev')), |
138 ], | 143 ], |
139 _('[OPTION]... [TEXT]'), | 144 _(b'[OPTION]... [TEXT]'), |
140 ) | 145 ) |
141 def debugbuilddag( | 146 def debugbuilddag( |
142 ui, | 147 ui, |
143 repo, | 148 repo, |
144 text=None, | 149 text=None, |
177 All string valued-elements are either strictly alphanumeric, or must | 182 All string valued-elements are either strictly alphanumeric, or must |
178 be enclosed in double quotes ("..."), with "\\" as escape character. | 183 be enclosed in double quotes ("..."), with "\\" as escape character. |
179 """ | 184 """ |
180 | 185 |
181 if text is None: | 186 if text is None: |
182 ui.status(_("reading DAG from stdin\n")) | 187 ui.status(_(b"reading DAG from stdin\n")) |
183 text = ui.fin.read() | 188 text = ui.fin.read() |
184 | 189 |
185 cl = repo.changelog | 190 cl = repo.changelog |
186 if len(cl) > 0: | 191 if len(cl) > 0: |
187 raise error.Abort(_('repository is not empty')) | 192 raise error.Abort(_(b'repository is not empty')) |
188 | 193 |
189 # determine number of revs in DAG | 194 # determine number of revs in DAG |
190 total = 0 | 195 total = 0 |
191 for type, data in dagparser.parsedag(text): | 196 for type, data in dagparser.parsedag(text): |
192 if type == 'n': | 197 if type == b'n': |
193 total += 1 | 198 total += 1 |
194 | 199 |
195 if mergeable_file: | 200 if mergeable_file: |
196 linesperrev = 2 | 201 linesperrev = 2 |
197 # make a file with k lines per rev | 202 # make a file with k lines per rev |
198 initialmergedlines = [ | 203 initialmergedlines = [ |
199 '%d' % i for i in pycompat.xrange(0, total * linesperrev) | 204 b'%d' % i for i in pycompat.xrange(0, total * linesperrev) |
200 ] | 205 ] |
201 initialmergedlines.append("") | 206 initialmergedlines.append(b"") |
202 | 207 |
203 tags = [] | 208 tags = [] |
204 progress = ui.makeprogress(_('building'), unit=_('revisions'), total=total) | 209 progress = ui.makeprogress( |
205 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"): | 210 _(b'building'), unit=_(b'revisions'), total=total |
| 211 ) |
| 212 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"): |
206 at = -1 | 213 at = -1 |
207 atbranch = 'default' | 214 atbranch = b'default' |
208 nodeids = [] | 215 nodeids = [] |
209 id = 0 | 216 id = 0 |
210 progress.update(id) | 217 progress.update(id) |
211 for type, data in dagparser.parsedag(text): | 218 for type, data in dagparser.parsedag(text): |
212 if type == 'n': | 219 if type == b'n': |
213 ui.note(('node %s\n' % pycompat.bytestr(data))) | 220 ui.note((b'node %s\n' % pycompat.bytestr(data))) |
214 id, ps = data | 221 id, ps = data |
215 | 222 |
216 files = [] | 223 files = [] |
217 filecontent = {} | 224 filecontent = {} |
218 | 225 |
219 p2 = None | 226 p2 = None |
220 if mergeable_file: | 227 if mergeable_file: |
221 fn = "mf" | 228 fn = b"mf" |
222 p1 = repo[ps[0]] | 229 p1 = repo[ps[0]] |
223 if len(ps) > 1: | 230 if len(ps) > 1: |
224 p2 = repo[ps[1]] | 231 p2 = repo[ps[1]] |
225 pa = p1.ancestor(p2) | 232 pa = p1.ancestor(p2) |
226 base, local, other = [ | 233 base, local, other = [ |
227 x[fn].data() for x in (pa, p1, p2) | 234 x[fn].data() for x in (pa, p1, p2) |
228 ] | 235 ] |
229 m3 = simplemerge.Merge3Text(base, local, other) | 236 m3 = simplemerge.Merge3Text(base, local, other) |
230 ml = [l.strip() for l in m3.merge_lines()] | 237 ml = [l.strip() for l in m3.merge_lines()] |
231 ml.append("") | 238 ml.append(b"") |
232 elif at > 0: | 239 elif at > 0: |
233 ml = p1[fn].data().split("\n") | 240 ml = p1[fn].data().split(b"\n") |
234 else: | 241 else: |
235 ml = initialmergedlines | 242 ml = initialmergedlines |
236 ml[id * linesperrev] += " r%i" % id | 243 ml[id * linesperrev] += b" r%i" % id |
237 mergedtext = "\n".join(ml) | 244 mergedtext = b"\n".join(ml) |
238 files.append(fn) | 245 files.append(fn) |
239 filecontent[fn] = mergedtext | 246 filecontent[fn] = mergedtext |
240 | 247 |
241 if overwritten_file: | 248 if overwritten_file: |
242 fn = "of" | 249 fn = b"of" |
243 files.append(fn) | 250 files.append(fn) |
244 filecontent[fn] = "r%i\n" % id | 251 filecontent[fn] = b"r%i\n" % id |
245 | 252 |
246 if new_file: | 253 if new_file: |
247 fn = "nf%i" % id | 254 fn = b"nf%i" % id |
248 files.append(fn) | 255 files.append(fn) |
249 filecontent[fn] = "r%i\n" % id | 256 filecontent[fn] = b"r%i\n" % id |
250 if len(ps) > 1: | 257 if len(ps) > 1: |
251 if not p2: | 258 if not p2: |
252 p2 = repo[ps[1]] | 259 p2 = repo[ps[1]] |
253 for fn in p2: | 260 for fn in p2: |
254 if fn.startswith("nf"): | 261 if fn.startswith(b"nf"): |
255 files.append(fn) | 262 files.append(fn) |
256 filecontent[fn] = p2[fn].data() | 263 filecontent[fn] = p2[fn].data() |
257 | 264 |
258 def fctxfn(repo, cx, path): | 265 def fctxfn(repo, cx, path): |
259 if path in filecontent: | 266 if path in filecontent: |
269 else: | 276 else: |
270 pars = [nodeids[p] for p in ps] | 277 pars = [nodeids[p] for p in ps] |
271 cx = context.memctx( | 278 cx = context.memctx( |
272 repo, | 279 repo, |
273 pars, | 280 pars, |
274 "r%i" % id, | 281 b"r%i" % id, |
275 files, | 282 files, |
276 fctxfn, | 283 fctxfn, |
277 date=(id, 0), | 284 date=(id, 0), |
278 user="debugbuilddag", | 285 user=b"debugbuilddag", |
279 extra={'branch': atbranch}, | 286 extra={b'branch': atbranch}, |
280 ) | 287 ) |
281 nodeid = repo.commitctx(cx) | 288 nodeid = repo.commitctx(cx) |
282 nodeids.append(nodeid) | 289 nodeids.append(nodeid) |
283 at = id | 290 at = id |
284 elif type == 'l': | 291 elif type == b'l': |
285 id, name = data | 292 id, name = data |
286 ui.note(('tag %s\n' % name)) | 293 ui.note((b'tag %s\n' % name)) |
287 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name)) | 294 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name)) |
288 elif type == 'a': | 295 elif type == b'a': |
289 ui.note(('branch %s\n' % data)) | 296 ui.note((b'branch %s\n' % data)) |
290 atbranch = data | 297 atbranch = data |
291 progress.update(id) | 298 progress.update(id) |
292 | 299 |
293 if tags: | 300 if tags: |
294 repo.vfs.write("localtags", "".join(tags)) | 301 repo.vfs.write(b"localtags", b"".join(tags)) |
295 | 302 |
296 | 303 |
297 def _debugchangegroup(ui, gen, all=None, indent=0, **opts): | 304 def _debugchangegroup(ui, gen, all=None, indent=0, **opts): |
298 indent_string = ' ' * indent | 305 indent_string = b' ' * indent |
299 if all: | 306 if all: |
300 ui.write( | 307 ui.write( |
301 "%sformat: id, p1, p2, cset, delta base, len(delta)\n" | 308 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" |
302 % indent_string | 309 % indent_string |
303 ) | 310 ) |
304 | 311 |
305 def showchunks(named): | 312 def showchunks(named): |
306 ui.write("\n%s%s\n" % (indent_string, named)) | 313 ui.write(b"\n%s%s\n" % (indent_string, named)) |
307 for deltadata in gen.deltaiter(): | 314 for deltadata in gen.deltaiter(): |
308 node, p1, p2, cs, deltabase, delta, flags = deltadata | 315 node, p1, p2, cs, deltabase, delta, flags = deltadata |
309 ui.write( | 316 ui.write( |
310 "%s%s %s %s %s %s %d\n" | 317 b"%s%s %s %s %s %s %d\n" |
311 % ( | 318 % ( |
312 indent_string, | 319 indent_string, |
313 hex(node), | 320 hex(node), |
314 hex(p1), | 321 hex(p1), |
315 hex(p2), | 322 hex(p2), |
318 len(delta), | 325 len(delta), |
319 ) | 326 ) |
320 ) | 327 ) |
321 | 328 |
322 chunkdata = gen.changelogheader() | 329 chunkdata = gen.changelogheader() |
323 showchunks("changelog") | 330 showchunks(b"changelog") |
324 chunkdata = gen.manifestheader() | 331 chunkdata = gen.manifestheader() |
325 showchunks("manifest") | 332 showchunks(b"manifest") |
326 for chunkdata in iter(gen.filelogheader, {}): | 333 for chunkdata in iter(gen.filelogheader, {}): |
327 fname = chunkdata['filename'] | 334 fname = chunkdata[b'filename'] |
328 showchunks(fname) | 335 showchunks(fname) |
329 else: | 336 else: |
330 if isinstance(gen, bundle2.unbundle20): | 337 if isinstance(gen, bundle2.unbundle20): |
331 raise error.Abort(_('use debugbundle2 for this file')) | 338 raise error.Abort(_(b'use debugbundle2 for this file')) |
332 chunkdata = gen.changelogheader() | 339 chunkdata = gen.changelogheader() |
333 for deltadata in gen.deltaiter(): | 340 for deltadata in gen.deltaiter(): |
334 node, p1, p2, cs, deltabase, delta, flags = deltadata | 341 node, p1, p2, cs, deltabase, delta, flags = deltadata |
335 ui.write("%s%s\n" % (indent_string, hex(node))) | 342 ui.write(b"%s%s\n" % (indent_string, hex(node))) |
336 | 343 |
337 | 344 |
338 def _debugobsmarkers(ui, part, indent=0, **opts): | 345 def _debugobsmarkers(ui, part, indent=0, **opts): |
339 """display version and markers contained in 'data'""" | 346 """display version and markers contained in 'data'""" |
340 opts = pycompat.byteskwargs(opts) | 347 opts = pycompat.byteskwargs(opts) |
341 data = part.read() | 348 data = part.read() |
342 indent_string = ' ' * indent | 349 indent_string = b' ' * indent |
343 try: | 350 try: |
344 version, markers = obsolete._readmarkers(data) | 351 version, markers = obsolete._readmarkers(data) |
345 except error.UnknownVersion as exc: | 352 except error.UnknownVersion as exc: |
346 msg = "%sunsupported version: %s (%d bytes)\n" | 353 msg = b"%sunsupported version: %s (%d bytes)\n" |
347 msg %= indent_string, exc.version, len(data) | 354 msg %= indent_string, exc.version, len(data) |
348 ui.write(msg) | 355 ui.write(msg) |
349 else: | 356 else: |
350 msg = "%sversion: %d (%d bytes)\n" | 357 msg = b"%sversion: %d (%d bytes)\n" |
351 msg %= indent_string, version, len(data) | 358 msg %= indent_string, version, len(data) |
352 ui.write(msg) | 359 ui.write(msg) |
353 fm = ui.formatter('debugobsolete', opts) | 360 fm = ui.formatter(b'debugobsolete', opts) |
354 for rawmarker in sorted(markers): | 361 for rawmarker in sorted(markers): |
355 m = obsutil.marker(None, rawmarker) | 362 m = obsutil.marker(None, rawmarker) |
356 fm.startitem() | 363 fm.startitem() |
357 fm.plain(indent_string) | 364 fm.plain(indent_string) |
358 cmdutil.showmarker(fm, m) | 365 cmdutil.showmarker(fm, m) |
359 fm.end() | 366 fm.end() |
360 | 367 |
361 | 368 |
362 def _debugphaseheads(ui, data, indent=0): | 369 def _debugphaseheads(ui, data, indent=0): |
363 """display version and markers contained in 'data'""" | 370 """display version and markers contained in 'data'""" |
364 indent_string = ' ' * indent | 371 indent_string = b' ' * indent |
365 headsbyphase = phases.binarydecode(data) | 372 headsbyphase = phases.binarydecode(data) |
366 for phase in phases.allphases: | 373 for phase in phases.allphases: |
367 for head in headsbyphase[phase]: | 374 for head in headsbyphase[phase]: |
368 ui.write(indent_string) | 375 ui.write(indent_string) |
369 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase])) | 376 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase])) |
370 | 377 |
371 | 378 |
372 def _quasirepr(thing): | 379 def _quasirepr(thing): |
373 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)): | 380 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)): |
374 return '{%s}' % ( | 381 return b'{%s}' % ( |
375 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)) | 382 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)) |
376 ) | 383 ) |
377 return pycompat.bytestr(repr(thing)) | 384 return pycompat.bytestr(repr(thing)) |
378 | 385 |
379 | 386 |
380 def _debugbundle2(ui, gen, all=None, **opts): | 387 def _debugbundle2(ui, gen, all=None, **opts): |
381 """lists the contents of a bundle2""" | 388 """lists the contents of a bundle2""" |
382 if not isinstance(gen, bundle2.unbundle20): | 389 if not isinstance(gen, bundle2.unbundle20): |
383 raise error.Abort(_('not a bundle2 file')) | 390 raise error.Abort(_(b'not a bundle2 file')) |
384 ui.write(('Stream params: %s\n' % _quasirepr(gen.params))) | 391 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params))) |
385 parttypes = opts.get(r'part_type', []) | 392 parttypes = opts.get(r'part_type', []) |
386 for part in gen.iterparts(): | 393 for part in gen.iterparts(): |
387 if parttypes and part.type not in parttypes: | 394 if parttypes and part.type not in parttypes: |
388 continue | 395 continue |
389 msg = '%s -- %s (mandatory: %r)\n' | 396 msg = b'%s -- %s (mandatory: %r)\n' |
390 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory))) | 397 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory))) |
391 if part.type == 'changegroup': | 398 if part.type == b'changegroup': |
392 version = part.params.get('version', '01') | 399 version = part.params.get(b'version', b'01') |
393 cg = changegroup.getunbundler(version, part, 'UN') | 400 cg = changegroup.getunbundler(version, part, b'UN') |
394 if not ui.quiet: | 401 if not ui.quiet: |
395 _debugchangegroup(ui, cg, all=all, indent=4, **opts) | 402 _debugchangegroup(ui, cg, all=all, indent=4, **opts) |
396 if part.type == 'obsmarkers': | 403 if part.type == b'obsmarkers': |
397 if not ui.quiet: | 404 if not ui.quiet: |
398 _debugobsmarkers(ui, part, indent=4, **opts) | 405 _debugobsmarkers(ui, part, indent=4, **opts) |
399 if part.type == 'phase-heads': | 406 if part.type == b'phase-heads': |
400 if not ui.quiet: | 407 if not ui.quiet: |
401 _debugphaseheads(ui, part, indent=4) | 408 _debugphaseheads(ui, part, indent=4) |
402 | 409 |
403 | 410 |
404 @command( | 411 @command( |
405 'debugbundle', | 412 b'debugbundle', |
406 [ | 413 [ |
407 ('a', 'all', None, _('show all details')), | 414 (b'a', b'all', None, _(b'show all details')), |
408 ('', 'part-type', [], _('show only the named part type')), | 415 (b'', b'part-type', [], _(b'show only the named part type')), |
409 ('', 'spec', None, _('print the bundlespec of the bundle')), | 416 (b'', b'spec', None, _(b'print the bundlespec of the bundle')), |
410 ], | 417 ], |
411 _('FILE'), | 418 _(b'FILE'), |
412 norepo=True, | 419 norepo=True, |
413 ) | 420 ) |
414 def debugbundle(ui, bundlepath, all=None, spec=None, **opts): | 421 def debugbundle(ui, bundlepath, all=None, spec=None, **opts): |
415 """lists the contents of a bundle""" | 422 """lists the contents of a bundle""" |
416 with hg.openpath(ui, bundlepath) as f: | 423 with hg.openpath(ui, bundlepath) as f: |
417 if spec: | 424 if spec: |
418 spec = exchange.getbundlespec(ui, f) | 425 spec = exchange.getbundlespec(ui, f) |
419 ui.write('%s\n' % spec) | 426 ui.write(b'%s\n' % spec) |
420 return | 427 return |
421 | 428 |
422 gen = exchange.readbundle(ui, f, bundlepath) | 429 gen = exchange.readbundle(ui, f, bundlepath) |
423 if isinstance(gen, bundle2.unbundle20): | 430 if isinstance(gen, bundle2.unbundle20): |
424 return _debugbundle2(ui, gen, all=all, **opts) | 431 return _debugbundle2(ui, gen, all=all, **opts) |
425 _debugchangegroup(ui, gen, all=all, **opts) | 432 _debugchangegroup(ui, gen, all=all, **opts) |
426 | 433 |
427 | 434 |
428 @command('debugcapabilities', [], _('PATH'), norepo=True) | 435 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True) |
429 def debugcapabilities(ui, path, **opts): | 436 def debugcapabilities(ui, path, **opts): |
430 """lists the capabilities of a remote peer""" | 437 """lists the capabilities of a remote peer""" |
431 opts = pycompat.byteskwargs(opts) | 438 opts = pycompat.byteskwargs(opts) |
432 peer = hg.peer(ui, opts, path) | 439 peer = hg.peer(ui, opts, path) |
433 caps = peer.capabilities() | 440 caps = peer.capabilities() |
434 ui.write('Main capabilities:\n') | 441 ui.write(b'Main capabilities:\n') |
435 for c in sorted(caps): | 442 for c in sorted(caps): |
436 ui.write(' %s\n' % c) | 443 ui.write(b' %s\n' % c) |
437 b2caps = bundle2.bundle2caps(peer) | 444 b2caps = bundle2.bundle2caps(peer) |
438 if b2caps: | 445 if b2caps: |
439 ui.write('Bundle2 capabilities:\n') | 446 ui.write(b'Bundle2 capabilities:\n') |
440 for key, values in sorted(b2caps.iteritems()): | 447 for key, values in sorted(b2caps.iteritems()): |
441 ui.write(' %s\n' % key) | 448 ui.write(b' %s\n' % key) |
442 for v in values: | 449 for v in values: |
443 ui.write(' %s\n' % v) | 450 ui.write(b' %s\n' % v) |
444 | 451 |
445 | 452 |
446 @command('debugcheckstate', [], '') | 453 @command(b'debugcheckstate', [], b'') |
447 def debugcheckstate(ui, repo): | 454 def debugcheckstate(ui, repo): |
448 """validate the correctness of the current dirstate""" | 455 """validate the correctness of the current dirstate""" |
449 parent1, parent2 = repo.dirstate.parents() | 456 parent1, parent2 = repo.dirstate.parents() |
450 m1 = repo[parent1].manifest() | 457 m1 = repo[parent1].manifest() |
451 m2 = repo[parent2].manifest() | 458 m2 = repo[parent2].manifest() |
452 errors = 0 | 459 errors = 0 |
453 for f in repo.dirstate: | 460 for f in repo.dirstate: |
454 state = repo.dirstate[f] | 461 state = repo.dirstate[f] |
455 if state in "nr" and f not in m1: | 462 if state in b"nr" and f not in m1: |
456 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state)) | 463 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state)) |
457 errors += 1 | 464 errors += 1 |
458 if state in "a" and f in m1: | 465 if state in b"a" and f in m1: |
459 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state)) | 466 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state)) |
460 errors += 1 | 467 errors += 1 |
461 if state in "m" and f not in m1 and f not in m2: | 468 if state in b"m" and f not in m1 and f not in m2: |
462 ui.warn( | 469 ui.warn( |
463 _("%s in state %s, but not in either manifest\n") % (f, state) | 470 _(b"%s in state %s, but not in either manifest\n") % (f, state) |
464 ) | 471 ) |
465 errors += 1 | 472 errors += 1 |
466 for f in m1: | 473 for f in m1: |
467 state = repo.dirstate[f] | 474 state = repo.dirstate[f] |
468 if state not in "nrm": | 475 if state not in b"nrm": |
469 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state)) | 476 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state)) |
470 errors += 1 | 477 errors += 1 |
471 if errors: | 478 if errors: |
472 error = _(".hg/dirstate inconsistent with current parent's manifest") | 479 error = _(b".hg/dirstate inconsistent with current parent's manifest") |
473 raise error.Abort(error) | 480 raise error.Abort(error) |
474 | 481 |
475 | 482 |
476 @command( | 483 @command( |
477 'debugcolor', | 484 b'debugcolor', |
478 [('', 'style', None, _('show all configured styles'))], | 485 [(b'', b'style', None, _(b'show all configured styles'))], |
479 'hg debugcolor', | 486 b'hg debugcolor', |
480 ) | 487 ) |
481 def debugcolor(ui, repo, **opts): | 488 def debugcolor(ui, repo, **opts): |
482 """show available color, effects or style""" | 489 """show available color, effects or style""" |
483 ui.write('color mode: %s\n' % stringutil.pprint(ui._colormode)) | 490 ui.write(b'color mode: %s\n' % stringutil.pprint(ui._colormode)) |
484 if opts.get(r'style'): | 491 if opts.get(r'style'): |
485 return _debugdisplaystyle(ui) | 492 return _debugdisplaystyle(ui) |
486 else: | 493 else: |
487 return _debugdisplaycolor(ui) | 494 return _debugdisplaycolor(ui) |
488 | 495 |
491 ui = ui.copy() | 498 ui = ui.copy() |
492 ui._styles.clear() | 499 ui._styles.clear() |
493 for effect in color._activeeffects(ui).keys(): | 500 for effect in color._activeeffects(ui).keys(): |
494 ui._styles[effect] = effect | 501 ui._styles[effect] = effect |
495 if ui._terminfoparams: | 502 if ui._terminfoparams: |
496 for k, v in ui.configitems('color'): | 503 for k, v in ui.configitems(b'color'): |
497 if k.startswith('color.'): | 504 if k.startswith(b'color.'): |
498 ui._styles[k] = k[6:] | 505 ui._styles[k] = k[6:] |
499 elif k.startswith('terminfo.'): | 506 elif k.startswith(b'terminfo.'): |
500 ui._styles[k] = k[9:] | 507 ui._styles[k] = k[9:] |
501 ui.write(_('available colors:\n')) | 508 ui.write(_(b'available colors:\n')) |
502 # sort label with a '_' after the other to group '_background' entry. | 509 # sort label with a '_' after the other to group '_background' entry. |
503 items = sorted(ui._styles.items(), key=lambda i: ('_' in i[0], i[0], i[1])) | 510 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1])) |
504 for colorname, label in items: | 511 for colorname, label in items: |
505 ui.write('%s\n' % colorname, label=label) | 512 ui.write(b'%s\n' % colorname, label=label) |
506 | 513 |
507 | 514 |
508 def _debugdisplaystyle(ui): | 515 def _debugdisplaystyle(ui): |
509 ui.write(_('available style:\n')) | 516 ui.write(_(b'available style:\n')) |
510 if not ui._styles: | 517 if not ui._styles: |
511 return | 518 return |
512 width = max(len(s) for s in ui._styles) | 519 width = max(len(s) for s in ui._styles) |
513 for label, effects in sorted(ui._styles.items()): | 520 for label, effects in sorted(ui._styles.items()): |
514 ui.write('%s' % label, label=label) | 521 ui.write(b'%s' % label, label=label) |
515 if effects: | 522 if effects: |
516 # 50 | 523 # 50 |
517 ui.write(': ') | 524 ui.write(b': ') |
518 ui.write(' ' * (max(0, width - len(label)))) | 525 ui.write(b' ' * (max(0, width - len(label)))) |
519 ui.write(', '.join(ui.label(e, e) for e in effects.split())) | 526 ui.write(b', '.join(ui.label(e, e) for e in effects.split())) |
520 ui.write('\n') | 527 ui.write(b'\n') |
521 | 528 |
522 | 529 |
523 @command('debugcreatestreamclonebundle', [], 'FILE') | 530 @command(b'debugcreatestreamclonebundle', [], b'FILE') |
524 def debugcreatestreamclonebundle(ui, repo, fname): | 531 def debugcreatestreamclonebundle(ui, repo, fname): |
525 """create a stream clone bundle file | 532 """create a stream clone bundle file |
526 | 533 |
527 Stream bundles are special bundles that are essentially archives of | 534 Stream bundles are special bundles that are essentially archives of |
528 revlog files. They are commonly used for cloning very quickly. | 535 revlog files. They are commonly used for cloning very quickly. |
530 # TODO we may want to turn this into an abort when this functionality | 537 # TODO we may want to turn this into an abort when this functionality |
531 # is moved into `hg bundle`. | 538 # is moved into `hg bundle`. |
532 if phases.hassecret(repo): | 539 if phases.hassecret(repo): |
533 ui.warn( | 540 ui.warn( |
534 _( | 541 _( |
535 '(warning: stream clone bundle will contain secret ' | 542 b'(warning: stream clone bundle will contain secret ' |
536 'revisions)\n' | 543 b'revisions)\n' |
537 ) | 544 ) |
538 ) | 545 ) |
539 | 546 |
540 requirements, gen = streamclone.generatebundlev1(repo) | 547 requirements, gen = streamclone.generatebundlev1(repo) |
541 changegroup.writechunks(ui, gen, fname) | 548 changegroup.writechunks(ui, gen, fname) |
542 | 549 |
543 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements))) | 550 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements))) |
544 | 551 |
545 | 552 |
546 @command( | 553 @command( |
547 'debugdag', | 554 b'debugdag', |
548 [ | 555 [ |
549 ('t', 'tags', None, _('use tags as labels')), | 556 (b't', b'tags', None, _(b'use tags as labels')), |
550 ('b', 'branches', None, _('annotate with branch names')), | 557 (b'b', b'branches', None, _(b'annotate with branch names')), |
551 ('', 'dots', None, _('use dots for runs')), | 558 (b'', b'dots', None, _(b'use dots for runs')), |
552 ('s', 'spaces', None, _('separate elements by spaces')), | 559 (b's', b'spaces', None, _(b'separate elements by spaces')), |
553 ], | 560 ], |
554 _('[OPTION]... [FILE [REV]...]'), | 561 _(b'[OPTION]... [FILE [REV]...]'), |
555 optionalrepo=True, | 562 optionalrepo=True, |
556 ) | 563 ) |
557 def debugdag(ui, repo, file_=None, *revs, **opts): | 564 def debugdag(ui, repo, file_=None, *revs, **opts): |
558 """format the changelog or an index DAG as a concise textual description | 565 """format the changelog or an index DAG as a concise textual description |
559 | 566 |
568 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_) | 575 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_) |
569 revs = set((int(r) for r in revs)) | 576 revs = set((int(r) for r in revs)) |
570 | 577 |
571 def events(): | 578 def events(): |
572 for r in rlog: | 579 for r in rlog: |
573 yield 'n', (r, list(p for p in rlog.parentrevs(r) if p != -1)) | 580 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1)) |
574 if r in revs: | 581 if r in revs: |
575 yield 'l', (r, "r%i" % r) | 582 yield b'l', (r, b"r%i" % r) |
576 | 583 |
577 elif repo: | 584 elif repo: |
578 cl = repo.changelog | 585 cl = repo.changelog |
579 tags = opts.get(r'tags') | 586 tags = opts.get(r'tags') |
580 branches = opts.get(r'branches') | 587 branches = opts.get(r'branches') |
582 labels = {} | 589 labels = {} |
583 for l, n in repo.tags().items(): | 590 for l, n in repo.tags().items(): |
584 labels.setdefault(cl.rev(n), []).append(l) | 591 labels.setdefault(cl.rev(n), []).append(l) |
585 | 592 |
586 def events(): | 593 def events(): |
587 b = "default" | 594 b = b"default" |
588 for r in cl: | 595 for r in cl: |
589 if branches: | 596 if branches: |
590 newb = cl.read(cl.node(r))[5]['branch'] | 597 newb = cl.read(cl.node(r))[5][b'branch'] |
591 if newb != b: | 598 if newb != b: |
592 yield 'a', newb | 599 yield b'a', newb |
593 b = newb | 600 b = newb |
594 yield 'n', (r, list(p for p in cl.parentrevs(r) if p != -1)) | 601 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1)) |
595 if tags: | 602 if tags: |
596 ls = labels.get(r) | 603 ls = labels.get(r) |
597 if ls: | 604 if ls: |
598 for l in ls: | 605 for l in ls: |
599 yield 'l', (r, l) | 606 yield b'l', (r, l) |
600 | 607 |
601 else: | 608 else: |
602 raise error.Abort(_('need repo for changelog dag')) | 609 raise error.Abort(_(b'need repo for changelog dag')) |
603 | 610 |
604 for line in dagparser.dagtextlines( | 611 for line in dagparser.dagtextlines( |
605 events(), | 612 events(), |
606 addspaces=spaces, | 613 addspaces=spaces, |
607 wraplabels=True, | 614 wraplabels=True, |
609 wrapnonlinear=dots, | 616 wrapnonlinear=dots, |
610 usedots=dots, | 617 usedots=dots, |
611 maxlinewidth=70, | 618 maxlinewidth=70, |
612 ): | 619 ): |
613 ui.write(line) | 620 ui.write(line) |
614 ui.write("\n") | 621 ui.write(b"\n") |
615 | 622 |
616 | 623 |
617 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV')) | 624 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV')) |
618 def debugdata(ui, repo, file_, rev=None, **opts): | 625 def debugdata(ui, repo, file_, rev=None, **opts): |
619 """dump the contents of a data file revision""" | 626 """dump the contents of a data file revision""" |
620 opts = pycompat.byteskwargs(opts) | 627 opts = pycompat.byteskwargs(opts) |
621 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'): | 628 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'): |
622 if rev is not None: | 629 if rev is not None: |
623 raise error.CommandError('debugdata', _('invalid arguments')) | 630 raise error.CommandError(b'debugdata', _(b'invalid arguments')) |
624 file_, rev = None, file_ | 631 file_, rev = None, file_ |
625 elif rev is None: | 632 elif rev is None: |
626 raise error.CommandError('debugdata', _('invalid arguments')) | 633 raise error.CommandError(b'debugdata', _(b'invalid arguments')) |
627 r = cmdutil.openstorage(repo, 'debugdata', file_, opts) | 634 r = cmdutil.openstorage(repo, b'debugdata', file_, opts) |
628 try: | 635 try: |
629 ui.write(r.rawdata(r.lookup(rev))) | 636 ui.write(r.rawdata(r.lookup(rev))) |
630 except KeyError: | 637 except KeyError: |
631 raise error.Abort(_('invalid revision identifier %s') % rev) | 638 raise error.Abort(_(b'invalid revision identifier %s') % rev) |
632 | 639 |
633 | 640 |
634 @command( | 641 @command( |
635 'debugdate', | 642 b'debugdate', |
636 [('e', 'extended', None, _('try extended date formats'))], | 643 [(b'e', b'extended', None, _(b'try extended date formats'))], |
637 _('[-e] DATE [RANGE]'), | 644 _(b'[-e] DATE [RANGE]'), |
638 norepo=True, | 645 norepo=True, |
639 optionalrepo=True, | 646 optionalrepo=True, |
640 ) | 647 ) |
641 def debugdate(ui, date, range=None, **opts): | 648 def debugdate(ui, date, range=None, **opts): |
642 """parse and display a date""" | 649 """parse and display a date""" |
643 if opts[r"extended"]: | 650 if opts[r"extended"]: |
644 d = dateutil.parsedate(date, util.extendeddateformats) | 651 d = dateutil.parsedate(date, util.extendeddateformats) |
645 else: | 652 else: |
646 d = dateutil.parsedate(date) | 653 d = dateutil.parsedate(date) |
647 ui.write("internal: %d %d\n" % d) | 654 ui.write(b"internal: %d %d\n" % d) |
648 ui.write("standard: %s\n" % dateutil.datestr(d)) | 655 ui.write(b"standard: %s\n" % dateutil.datestr(d)) |
649 if range: | 656 if range: |
650 m = dateutil.matchdate(range) | 657 m = dateutil.matchdate(range) |
651 ui.write("match: %s\n" % m(d[0])) | 658 ui.write(b"match: %s\n" % m(d[0])) |
652 | 659 |
653 | 660 |
654 @command( | 661 @command( |
655 'debugdeltachain', | 662 b'debugdeltachain', |
656 cmdutil.debugrevlogopts + cmdutil.formatteropts, | 663 cmdutil.debugrevlogopts + cmdutil.formatteropts, |
657 _('-c|-m|FILE'), | 664 _(b'-c|-m|FILE'), |
658 optionalrepo=True, | 665 optionalrepo=True, |
659 ) | 666 ) |
660 def debugdeltachain(ui, repo, file_=None, **opts): | 667 def debugdeltachain(ui, repo, file_=None, **opts): |
661 """dump information about delta chains in a revlog | 668 """dump information about delta chains in a revlog |
662 | 669 |
691 :``srchunks``: in how many data hunks the whole revision would be read | 698 :``srchunks``: in how many data hunks the whole revision would be read |
692 | 699 |
693 The sparse read can be enabled with experimental.sparse-read = True | 700 The sparse read can be enabled with experimental.sparse-read = True |
694 """ | 701 """ |
695 opts = pycompat.byteskwargs(opts) | 702 opts = pycompat.byteskwargs(opts) |
696 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts) | 703 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts) |
697 index = r.index | 704 index = r.index |
698 start = r.start | 705 start = r.start |
699 length = r.length | 706 length = r.length |
700 generaldelta = r.version & revlog.FLAG_GENERALDELTA | 707 generaldelta = r.version & revlog.FLAG_GENERALDELTA |
701 withsparseread = getattr(r, '_withsparseread', False) | 708 withsparseread = getattr(r, '_withsparseread', False) |
706 uncompsize = e[2] | 713 uncompsize = e[2] |
707 chainsize = 0 | 714 chainsize = 0 |
708 | 715 |
709 if generaldelta: | 716 if generaldelta: |
710 if e[3] == e[5]: | 717 if e[3] == e[5]: |
711 deltatype = 'p1' | 718 deltatype = b'p1' |
712 elif e[3] == e[6]: | 719 elif e[3] == e[6]: |
713 deltatype = 'p2' | 720 deltatype = b'p2' |
714 elif e[3] == rev - 1: | 721 elif e[3] == rev - 1: |
715 deltatype = 'prev' | 722 deltatype = b'prev' |
716 elif e[3] == rev: | 723 elif e[3] == rev: |
717 deltatype = 'base' | 724 deltatype = b'base' |
718 else: | 725 else: |
719 deltatype = 'other' | 726 deltatype = b'other' |
720 else: | 727 else: |
721 if e[3] == rev: | 728 if e[3] == rev: |
722 deltatype = 'base' | 729 deltatype = b'base' |
723 else: | 730 else: |
724 deltatype = 'prev' | 731 deltatype = b'prev' |
725 | 732 |
726 chain = r._deltachain(rev)[0] | 733 chain = r._deltachain(rev)[0] |
727 for iterrev in chain: | 734 for iterrev in chain: |
728 e = index[iterrev] | 735 e = index[iterrev] |
729 chainsize += e[1] | 736 chainsize += e[1] |
730 | 737 |
731 return compsize, uncompsize, deltatype, chain, chainsize | 738 return compsize, uncompsize, deltatype, chain, chainsize |
732 | 739 |
733 fm = ui.formatter('debugdeltachain', opts) | 740 fm = ui.formatter(b'debugdeltachain', opts) |
734 | 741 |
735 fm.plain( | 742 fm.plain( |
736 ' rev chain# chainlen prev delta ' | 743 b' rev chain# chainlen prev delta ' |
737 'size rawsize chainsize ratio lindist extradist ' | 744 b'size rawsize chainsize ratio lindist extradist ' |
738 'extraratio' | 745 b'extraratio' |
739 ) | 746 ) |
740 if withsparseread: | 747 if withsparseread: |
741 fm.plain(' readsize largestblk rddensity srchunks') | 748 fm.plain(b' readsize largestblk rddensity srchunks') |
742 fm.plain('\n') | 749 fm.plain(b'\n') |
743 | 750 |
744 chainbases = {} | 751 chainbases = {} |
745 for rev in r: | 752 for rev in r: |
746 comp, uncomp, deltatype, chain, chainsize = revinfo(rev) | 753 comp, uncomp, deltatype, chain, chainsize = revinfo(rev) |
747 chainbase = chain[0] | 754 chainbase = chain[0] |
765 else: | 772 else: |
766 extraratio = extradist | 773 extraratio = extradist |
767 | 774 |
768 fm.startitem() | 775 fm.startitem() |
769 fm.write( | 776 fm.write( |
770 'rev chainid chainlen prevrev deltatype compsize ' | 777 b'rev chainid chainlen prevrev deltatype compsize ' |
771 'uncompsize chainsize chainratio lindist extradist ' | 778 b'uncompsize chainsize chainratio lindist extradist ' |
772 'extraratio', | 779 b'extraratio', |
773 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f', | 780 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f', |
774 rev, | 781 rev, |
775 chainid, | 782 chainid, |
776 len(chain), | 783 len(chain), |
777 prevrev, | 784 prevrev, |
778 deltatype, | 785 deltatype, |
814 readdensity = float(chainsize) / float(readsize) | 821 readdensity = float(chainsize) / float(readsize) |
815 else: | 822 else: |
816 readdensity = 1 | 823 readdensity = 1 |
817 | 824 |
818 fm.write( | 825 fm.write( |
819 'readsize largestblock readdensity srchunks', | 826 b'readsize largestblock readdensity srchunks', |
820 ' %10d %10d %9.5f %8d', | 827 b' %10d %10d %9.5f %8d', |
821 readsize, | 828 readsize, |
822 largestblock, | 829 largestblock, |
823 readdensity, | 830 readdensity, |
824 srchunks, | 831 srchunks, |
825 readsize=readsize, | 832 readsize=readsize, |
826 largestblock=largestblock, | 833 largestblock=largestblock, |
827 readdensity=readdensity, | 834 readdensity=readdensity, |
828 srchunks=srchunks, | 835 srchunks=srchunks, |
829 ) | 836 ) |
830 | 837 |
831 fm.plain('\n') | 838 fm.plain(b'\n') |
832 | 839 |
833 fm.end() | 840 fm.end() |
834 | 841 |
835 | 842 |
836 @command( | 843 @command( |
837 'debugdirstate|debugstate', | 844 b'debugdirstate|debugstate', |
838 [ | 845 [ |
839 ('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')), | 846 ( |
840 ('', 'dates', True, _('display the saved mtime')), | 847 b'', |
841 ('', 'datesort', None, _('sort by saved mtime')), | 848 b'nodates', |
| 849 None, |
| 850 _(b'do not display the saved mtime (DEPRECATED)'), |
| 851 ), |
| 852 (b'', b'dates', True, _(b'display the saved mtime')), |
| 853 (b'', b'datesort', None, _(b'sort by saved mtime')), |
842 ], | 854 ], |
843 _('[OPTION]...'), | 855 _(b'[OPTION]...'), |
844 ) | 856 ) |
845 def debugstate(ui, repo, **opts): | 857 def debugstate(ui, repo, **opts): |
846 """show the contents of the current dirstate""" | 858 """show the contents of the current dirstate""" |
847 | 859 |
848 nodates = not opts[r'dates'] | 860 nodates = not opts[r'dates'] |
854 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename | 866 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename |
855 else: | 867 else: |
856 keyfunc = None # sort by filename | 868 keyfunc = None # sort by filename |
857 for file_, ent in sorted(repo.dirstate.iteritems(), key=keyfunc): | 869 for file_, ent in sorted(repo.dirstate.iteritems(), key=keyfunc): |
858 if ent[3] == -1: | 870 if ent[3] == -1: |
859 timestr = 'unset ' | 871 timestr = b'unset ' |
860 elif nodates: | 872 elif nodates: |
861 timestr = 'set ' | 873 timestr = b'set ' |
862 else: | 874 else: |
863 timestr = time.strftime( | 875 timestr = time.strftime( |
864 r"%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]) | 876 r"%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]) |
865 ) | 877 ) |
866 timestr = encoding.strtolocal(timestr) | 878 timestr = encoding.strtolocal(timestr) |
867 if ent[1] & 0o20000: | 879 if ent[1] & 0o20000: |
868 mode = 'lnk' | 880 mode = b'lnk' |
869 else: | 881 else: |
870 mode = '%3o' % (ent[1] & 0o777 & ~util.umask) | 882 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask) |
871 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_)) | 883 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_)) |
872 for f in repo.dirstate.copies(): | 884 for f in repo.dirstate.copies(): |
873 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) | 885 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) |
874 | 886 |
875 | 887 |
876 @command( | 888 @command( |
877 'debugdiscovery', | 889 b'debugdiscovery', |
878 [ | 890 [ |
879 ('', 'old', None, _('use old-style discovery')), | 891 (b'', b'old', None, _(b'use old-style discovery')), |
880 ( | 892 ( |
881 '', | 893 b'', |
882 'nonheads', | 894 b'nonheads', |
883 None, | 895 None, |
884 _('use old-style discovery with non-heads included'), | 896 _(b'use old-style discovery with non-heads included'), |
885 ), | 897 ), |
886 ('', 'rev', [], 'restrict discovery to this set of revs'), | 898 (b'', b'rev', [], b'restrict discovery to this set of revs'), |
887 ('', 'seed', '12323', 'specify the random seed use for discovery'), | 899 (b'', b'seed', b'12323', b'specify the random seed use for discovery'), |
888 ] | 900 ] |
889 + cmdutil.remoteopts, | 901 + cmdutil.remoteopts, |
890 _('[--rev REV] [OTHER]'), | 902 _(b'[--rev REV] [OTHER]'), |
891 ) | 903 ) |
892 def debugdiscovery(ui, repo, remoteurl="default", **opts): | 904 def debugdiscovery(ui, repo, remoteurl=b"default", **opts): |
893 """runs the changeset discovery protocol in isolation""" | 905 """runs the changeset discovery protocol in isolation""" |
894 opts = pycompat.byteskwargs(opts) | 906 opts = pycompat.byteskwargs(opts) |
895 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl)) | 907 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl)) |
896 remote = hg.peer(repo, opts, remoteurl) | 908 remote = hg.peer(repo, opts, remoteurl) |
897 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl)) | 909 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl)) |
898 | 910 |
899 # make sure tests are repeatable | 911 # make sure tests are repeatable |
900 random.seed(int(opts['seed'])) | 912 random.seed(int(opts[b'seed'])) |
901 | 913 |
902 if opts.get('old'): | 914 if opts.get(b'old'): |
903 | 915 |
904 def doit(pushedrevs, remoteheads, remote=remote): | 916 def doit(pushedrevs, remoteheads, remote=remote): |
905 if not util.safehasattr(remote, 'branches'): | 917 if not util.safehasattr(remote, b'branches'): |
906 # enable in-client legacy support | 918 # enable in-client legacy support |
907 remote = localrepo.locallegacypeer(remote.local()) | 919 remote = localrepo.locallegacypeer(remote.local()) |
908 common, _in, hds = treediscovery.findcommonincoming( | 920 common, _in, hds = treediscovery.findcommonincoming( |
909 repo, remote, force=True | 921 repo, remote, force=True |
910 ) | 922 ) |
911 common = set(common) | 923 common = set(common) |
912 if not opts.get('nonheads'): | 924 if not opts.get(b'nonheads'): |
913 ui.write( | 925 ui.write( |
914 "unpruned common: %s\n" | 926 b"unpruned common: %s\n" |
915 % " ".join(sorted(short(n) for n in common)) | 927 % b" ".join(sorted(short(n) for n in common)) |
916 ) | 928 ) |
917 | 929 |
918 clnode = repo.changelog.node | 930 clnode = repo.changelog.node |
919 common = repo.revs('heads(::%ln)', common) | 931 common = repo.revs(b'heads(::%ln)', common) |
920 common = {clnode(r) for r in common} | 932 common = {clnode(r) for r in common} |
921 return common, hds | 933 return common, hds |
922 | 934 |
923 else: | 935 else: |
924 | 936 |
931 ui, repo, remote, ancestorsof=nodes | 943 ui, repo, remote, ancestorsof=nodes |
932 ) | 944 ) |
933 return common, hds | 945 return common, hds |
934 | 946 |
935 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None) | 947 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None) |
936 localrevs = opts['rev'] | 948 localrevs = opts[b'rev'] |
937 with util.timedcm('debug-discovery') as t: | 949 with util.timedcm(b'debug-discovery') as t: |
938 common, hds = doit(localrevs, remoterevs) | 950 common, hds = doit(localrevs, remoterevs) |
939 | 951 |
940 # compute all statistics | 952 # compute all statistics |
941 common = set(common) | 953 common = set(common) |
942 rheads = set(hds) | 954 rheads = set(hds) |
943 lheads = set(repo.heads()) | 955 lheads = set(repo.heads()) |
944 | 956 |
945 data = {} | 957 data = {} |
946 data['elapsed'] = t.elapsed | 958 data[b'elapsed'] = t.elapsed |
947 data['nb-common'] = len(common) | 959 data[b'nb-common'] = len(common) |
948 data['nb-common-local'] = len(common & lheads) | 960 data[b'nb-common-local'] = len(common & lheads) |
949 data['nb-common-remote'] = len(common & rheads) | 961 data[b'nb-common-remote'] = len(common & rheads) |
950 data['nb-common-both'] = len(common & rheads & lheads) | 962 data[b'nb-common-both'] = len(common & rheads & lheads) |
951 data['nb-local'] = len(lheads) | 963 data[b'nb-local'] = len(lheads) |
952 data['nb-local-missing'] = data['nb-local'] - data['nb-common-local'] | 964 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local'] |
953 data['nb-remote'] = len(rheads) | 965 data[b'nb-remote'] = len(rheads) |
954 data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote'] | 966 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote'] |
955 data['nb-revs'] = len(repo.revs('all()')) | 967 data[b'nb-revs'] = len(repo.revs(b'all()')) |
956 data['nb-revs-common'] = len(repo.revs('::%ln', common)) | 968 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common)) |
957 data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common'] | 969 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common'] |
958 | 970 |
959 # display discovery summary | 971 # display discovery summary |
960 ui.write("elapsed time: %(elapsed)f seconds\n" % data) | 972 ui.write(b"elapsed time: %(elapsed)f seconds\n" % data) |
961 ui.write("heads summary:\n") | 973 ui.write(b"heads summary:\n") |
962 ui.write(" total common heads: %(nb-common)9d\n" % data) | 974 ui.write(b" total common heads: %(nb-common)9d\n" % data) |
963 ui.write(" also local heads: %(nb-common-local)9d\n" % data) | 975 ui.write(b" also local heads: %(nb-common-local)9d\n" % data) |
964 ui.write(" also remote heads: %(nb-common-remote)9d\n" % data) | 976 ui.write(b" also remote heads: %(nb-common-remote)9d\n" % data) |
965 ui.write(" both: %(nb-common-both)9d\n" % data) | 977 ui.write(b" both: %(nb-common-both)9d\n" % data) |
966 ui.write(" local heads: %(nb-local)9d\n" % data) | 978 ui.write(b" local heads: %(nb-local)9d\n" % data) |
967 ui.write(" common: %(nb-common-local)9d\n" % data) | 979 ui.write(b" common: %(nb-common-local)9d\n" % data) |
968 ui.write(" missing: %(nb-local-missing)9d\n" % data) | 980 ui.write(b" missing: %(nb-local-missing)9d\n" % data) |
969 ui.write(" remote heads: %(nb-remote)9d\n" % data) | 981 ui.write(b" remote heads: %(nb-remote)9d\n" % data) |
970 ui.write(" common: %(nb-common-remote)9d\n" % data) | 982 ui.write(b" common: %(nb-common-remote)9d\n" % data) |
971 ui.write(" unknown: %(nb-remote-unknown)9d\n" % data) | 983 ui.write(b" unknown: %(nb-remote-unknown)9d\n" % data) |
972 ui.write("local changesets: %(nb-revs)9d\n" % data) | 984 ui.write(b"local changesets: %(nb-revs)9d\n" % data) |
973 ui.write(" common: %(nb-revs-common)9d\n" % data) | 985 ui.write(b" common: %(nb-revs-common)9d\n" % data) |
974 ui.write(" missing: %(nb-revs-missing)9d\n" % data) | 986 ui.write(b" missing: %(nb-revs-missing)9d\n" % data) |
975 | 987 |
976 if ui.verbose: | 988 if ui.verbose: |
977 ui.write( | 989 ui.write( |
978 "common heads: %s\n" % " ".join(sorted(short(n) for n in common)) | 990 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common)) |
979 ) | 991 ) |
980 | 992 |
981 | 993 |
982 _chunksize = 4 << 10 | 994 _chunksize = 4 << 10 |
983 | 995 |
984 | 996 |
985 @command('debugdownload', [('o', 'output', '', _('path')),], optionalrepo=True) | 997 @command( |
| 998 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True |
| 999 ) |
986 def debugdownload(ui, repo, url, output=None, **opts): | 1000 def debugdownload(ui, repo, url, output=None, **opts): |
987 """download a resource using Mercurial logic and config | 1001 """download a resource using Mercurial logic and config |
988 """ | 1002 """ |
989 fh = urlmod.open(ui, url, output) | 1003 fh = urlmod.open(ui, url, output) |
990 | 1004 |
991 dest = ui | 1005 dest = ui |
992 if output: | 1006 if output: |
993 dest = open(output, "wb", _chunksize) | 1007 dest = open(output, b"wb", _chunksize) |
994 try: | 1008 try: |
995 data = fh.read(_chunksize) | 1009 data = fh.read(_chunksize) |
996 while data: | 1010 while data: |
997 dest.write(data) | 1011 dest.write(data) |
998 data = fh.read(_chunksize) | 1012 data = fh.read(_chunksize) |
999 finally: | 1013 finally: |
1000 if output: | 1014 if output: |
1001 dest.close() | 1015 dest.close() |
1002 | 1016 |
1003 | 1017 |
1004 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True) | 1018 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True) |
1005 def debugextensions(ui, repo, **opts): | 1019 def debugextensions(ui, repo, **opts): |
1006 '''show information about active extensions''' | 1020 '''show information about active extensions''' |
1007 opts = pycompat.byteskwargs(opts) | 1021 opts = pycompat.byteskwargs(opts) |
1008 exts = extensions.extensions(ui) | 1022 exts = extensions.extensions(ui) |
1009 hgver = util.version() | 1023 hgver = util.version() |
1010 fm = ui.formatter('debugextensions', opts) | 1024 fm = ui.formatter(b'debugextensions', opts) |
1011 for extname, extmod in sorted(exts, key=operator.itemgetter(0)): | 1025 for extname, extmod in sorted(exts, key=operator.itemgetter(0)): |
1012 isinternal = extensions.ismoduleinternal(extmod) | 1026 isinternal = extensions.ismoduleinternal(extmod) |
1013 extsource = pycompat.fsencode(extmod.__file__) | 1027 extsource = pycompat.fsencode(extmod.__file__) |
1014 if isinternal: | 1028 if isinternal: |
1015 exttestedwith = [] # never expose magic string to users | 1029 exttestedwith = [] # never expose magic string to users |
1016 else: | 1030 else: |
1017 exttestedwith = getattr(extmod, 'testedwith', '').split() | 1031 exttestedwith = getattr(extmod, 'testedwith', b'').split() |
1018 extbuglink = getattr(extmod, 'buglink', None) | 1032 extbuglink = getattr(extmod, 'buglink', None) |
1019 | 1033 |
1020 fm.startitem() | 1034 fm.startitem() |
1021 | 1035 |
1022 if ui.quiet or ui.verbose: | 1036 if ui.quiet or ui.verbose: |
1023 fm.write('name', '%s\n', extname) | 1037 fm.write(b'name', b'%s\n', extname) |
1024 else: | 1038 else: |
1025 fm.write('name', '%s', extname) | 1039 fm.write(b'name', b'%s', extname) |
1026 if isinternal or hgver in exttestedwith: | 1040 if isinternal or hgver in exttestedwith: |
1027 fm.plain('\n') | 1041 fm.plain(b'\n') |
1028 elif not exttestedwith: | 1042 elif not exttestedwith: |
1029 fm.plain(_(' (untested!)\n')) | 1043 fm.plain(_(b' (untested!)\n')) |
1030 else: | 1044 else: |
1031 lasttestedversion = exttestedwith[-1] | 1045 lasttestedversion = exttestedwith[-1] |
1032 fm.plain(' (%s!)\n' % lasttestedversion) | 1046 fm.plain(b' (%s!)\n' % lasttestedversion) |
1033 | 1047 |
1034 fm.condwrite( | 1048 fm.condwrite( |
1035 ui.verbose and extsource, | 1049 ui.verbose and extsource, |
1036 'source', | 1050 b'source', |
1037 _(' location: %s\n'), | 1051 _(b' location: %s\n'), |
1038 extsource or "", | 1052 extsource or b"", |
1039 ) | 1053 ) |
1040 | 1054 |
1041 if ui.verbose: | 1055 if ui.verbose: |
1042 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal]) | 1056 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal]) |
1043 fm.data(bundled=isinternal) | 1057 fm.data(bundled=isinternal) |
1044 | 1058 |
1045 fm.condwrite( | 1059 fm.condwrite( |
1046 ui.verbose and exttestedwith, | 1060 ui.verbose and exttestedwith, |
1047 'testedwith', | 1061 b'testedwith', |
1048 _(' tested with: %s\n'), | 1062 _(b' tested with: %s\n'), |
1049 fm.formatlist(exttestedwith, name='ver'), | 1063 fm.formatlist(exttestedwith, name=b'ver'), |
1050 ) | 1064 ) |
1051 | 1065 |
1052 fm.condwrite( | 1066 fm.condwrite( |
1053 ui.verbose and extbuglink, | 1067 ui.verbose and extbuglink, |
1054 'buglink', | 1068 b'buglink', |
1055 _(' bug reporting: %s\n'), | 1069 _(b' bug reporting: %s\n'), |
1056 extbuglink or "", | 1070 extbuglink or b"", |
1057 ) | 1071 ) |
1058 | 1072 |
1059 fm.end() | 1073 fm.end() |
1060 | 1074 |
1061 | 1075 |
1062 @command( | 1076 @command( |
1063 'debugfileset', | 1077 b'debugfileset', |
1064 [ | 1078 [ |
1065 ('r', 'rev', '', _('apply the filespec on this revision'), _('REV')), | |
1066 ( | 1079 ( |
1067 '', | 1080 b'r', |
1068 'all-files', | 1081 b'rev', |
1069 False, | 1082 b'', |
1070 _('test files from all revisions and working directory'), | 1083 _(b'apply the filespec on this revision'), |
| 1084 _(b'REV'), |
1071 ), | 1085 ), |
1072 ( | 1086 ( |
1073 's', | 1087 b'', |
1074 'show-matcher', | 1088 b'all-files', |
1075 None, | 1089 False, |
1076 _('print internal representation of matcher'), | 1090 _(b'test files from all revisions and working directory'), |
1077 ), | 1091 ), |
1078 ( | 1092 ( |
1079 'p', | 1093 b's', |
1080 'show-stage', | 1094 b'show-matcher', |
| 1095 None, |
| 1096 _(b'print internal representation of matcher'), |
| 1097 ), |
| 1098 ( |
| 1099 b'p', |
| 1100 b'show-stage', |
1081 [], | 1101 [], |
1082 _('print parsed tree at the given stage'), | 1102 _(b'print parsed tree at the given stage'), |
1083 _('NAME'), | 1103 _(b'NAME'), |
1084 ), | 1104 ), |
1085 ], | 1105 ], |
1086 _('[-r REV] [--all-files] [OPTION]... FILESPEC'), | 1106 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'), |
1087 ) | 1107 ) |
1088 def debugfileset(ui, repo, expr, **opts): | 1108 def debugfileset(ui, repo, expr, **opts): |
1089 '''parse and apply a fileset specification''' | 1109 '''parse and apply a fileset specification''' |
1090 from . import fileset | 1110 from . import fileset |
1091 | 1111 |
1092 fileset.symbols # force import of fileset so we have predicates to optimize | 1112 fileset.symbols # force import of fileset so we have predicates to optimize |
1093 opts = pycompat.byteskwargs(opts) | 1113 opts = pycompat.byteskwargs(opts) |
1094 ctx = scmutil.revsingle(repo, opts.get('rev'), None) | 1114 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None) |
1095 | 1115 |
1096 stages = [ | 1116 stages = [ |
1097 ('parsed', pycompat.identity), | 1117 (b'parsed', pycompat.identity), |
1098 ('analyzed', filesetlang.analyze), | 1118 (b'analyzed', filesetlang.analyze), |
1099 ('optimized', filesetlang.optimize), | 1119 (b'optimized', filesetlang.optimize), |
1100 ] | 1120 ] |
1101 stagenames = set(n for n, f in stages) | 1121 stagenames = set(n for n, f in stages) |
1102 | 1122 |
1103 showalways = set() | 1123 showalways = set() |
1104 if ui.verbose and not opts['show_stage']: | 1124 if ui.verbose and not opts[b'show_stage']: |
1105 # show parsed tree by --verbose (deprecated) | 1125 # show parsed tree by --verbose (deprecated) |
1106 showalways.add('parsed') | 1126 showalways.add(b'parsed') |
1107 if opts['show_stage'] == ['all']: | 1127 if opts[b'show_stage'] == [b'all']: |
1108 showalways.update(stagenames) | 1128 showalways.update(stagenames) |
1109 else: | 1129 else: |
1110 for n in opts['show_stage']: | 1130 for n in opts[b'show_stage']: |
1111 if n not in stagenames: | 1131 if n not in stagenames: |
1112 raise error.Abort(_('invalid stage name: %s') % n) | 1132 raise error.Abort(_(b'invalid stage name: %s') % n) |
1113 showalways.update(opts['show_stage']) | 1133 showalways.update(opts[b'show_stage']) |
1114 | 1134 |
1115 tree = filesetlang.parse(expr) | 1135 tree = filesetlang.parse(expr) |
1116 for n, f in stages: | 1136 for n, f in stages: |
1117 tree = f(tree) | 1137 tree = f(tree) |
1118 if n in showalways: | 1138 if n in showalways: |
1119 if opts['show_stage'] or n != 'parsed': | 1139 if opts[b'show_stage'] or n != b'parsed': |
1120 ui.write("* %s:\n" % n) | 1140 ui.write(b"* %s:\n" % n) |
1121 ui.write(filesetlang.prettyformat(tree), "\n") | 1141 ui.write(filesetlang.prettyformat(tree), b"\n") |
1122 | 1142 |
1123 files = set() | 1143 files = set() |
1124 if opts['all_files']: | 1144 if opts[b'all_files']: |
1125 for r in repo: | 1145 for r in repo: |
1126 c = repo[r] | 1146 c = repo[r] |
1127 files.update(c.files()) | 1147 files.update(c.files()) |
1128 files.update(c.substate) | 1148 files.update(c.substate) |
1129 if opts['all_files'] or ctx.rev() is None: | 1149 if opts[b'all_files'] or ctx.rev() is None: |
1130 wctx = repo[None] | 1150 wctx = repo[None] |
1131 files.update( | 1151 files.update( |
1132 repo.dirstate.walk( | 1152 repo.dirstate.walk( |
1133 scmutil.matchall(repo), | 1153 scmutil.matchall(repo), |
1134 subrepos=list(wctx.substate), | 1154 subrepos=list(wctx.substate), |
1140 else: | 1160 else: |
1141 files.update(ctx.files()) | 1161 files.update(ctx.files()) |
1142 files.update(ctx.substate) | 1162 files.update(ctx.substate) |
1143 | 1163 |
1144 m = ctx.matchfileset(expr) | 1164 m = ctx.matchfileset(expr) |
1145 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose): | 1165 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose): |
1146 ui.write('* matcher:\n', stringutil.prettyrepr(m), '\n') | 1166 ui.write(b'* matcher:\n', stringutil.prettyrepr(m), b'\n') |
1147 for f in sorted(files): | 1167 for f in sorted(files): |
1148 if not m(f): | 1168 if not m(f): |
1149 continue | 1169 continue |
1150 ui.write("%s\n" % f) | 1170 ui.write(b"%s\n" % f) |
1151 | 1171 |
1152 | 1172 |
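The --show-stage handling in debugfileset above reduces to a small set computation over three fixed stage names. A minimal standalone sketch, assuming only the stage names and the special 'all' value visible in the hunk (the helper name is hypothetical):

STAGES = [b'parsed', b'analyzed', b'optimized']

def stages_to_show(show_stage, verbose=False):
    # Decide which intermediate trees debugfileset would print.
    showalways = set()
    if verbose and not show_stage:
        # plain --verbose implies the parsed tree (deprecated behaviour)
        showalways.add(b'parsed')
    if show_stage == [b'all']:
        showalways.update(STAGES)
    else:
        for name in show_stage:
            if name not in STAGES:
                raise ValueError('invalid stage name: %r' % name)
        showalways.update(show_stage)
    return showalways

# stages_to_show([b'all']) yields all three stages;
# stages_to_show([], verbose=True) yields only {b'parsed'}.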
1153 @command('debugformat', [] + cmdutil.formatteropts) | 1173 @command(b'debugformat', [] + cmdutil.formatteropts) |
1154 def debugformat(ui, repo, **opts): | 1174 def debugformat(ui, repo, **opts): |
1155 """display format information about the current repository | 1175 """display format information about the current repository |
1156 | 1176 |
1157 Use --verbose to get extra information about current config value and | 1177 Use --verbose to get extra information about current config value and |
1158 Mercurial default.""" | 1178 Mercurial default.""" |
1159 opts = pycompat.byteskwargs(opts) | 1179 opts = pycompat.byteskwargs(opts) |
1160 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant) | 1180 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant) |
1161 maxvariantlength = max(len('format-variant'), maxvariantlength) | 1181 maxvariantlength = max(len(b'format-variant'), maxvariantlength) |
1162 | 1182 |
1163 def makeformatname(name): | 1183 def makeformatname(name): |
1164 return '%s:' + (' ' * (maxvariantlength - len(name))) | 1184 return b'%s:' + (b' ' * (maxvariantlength - len(name))) |
1165 | 1185 |
1166 fm = ui.formatter('debugformat', opts) | 1186 fm = ui.formatter(b'debugformat', opts) |
1167 if fm.isplain(): | 1187 if fm.isplain(): |
1168 | 1188 |
1169 def formatvalue(value): | 1189 def formatvalue(value): |
1170 if util.safehasattr(value, 'startswith'): | 1190 if util.safehasattr(value, b'startswith'): |
1171 return value | 1191 return value |
1172 if value: | 1192 if value: |
1173 return 'yes' | 1193 return b'yes' |
1174 else: | 1194 else: |
1175 return 'no' | 1195 return b'no' |
1176 | 1196 |
1177 else: | 1197 else: |
1178 formatvalue = pycompat.identity | 1198 formatvalue = pycompat.identity |
1179 | 1199 |
1180 fm.plain('format-variant') | 1200 fm.plain(b'format-variant') |
1181 fm.plain(' ' * (maxvariantlength - len('format-variant'))) | 1201 fm.plain(b' ' * (maxvariantlength - len(b'format-variant'))) |
1182 fm.plain(' repo') | 1202 fm.plain(b' repo') |
1183 if ui.verbose: | 1203 if ui.verbose: |
1184 fm.plain(' config default') | 1204 fm.plain(b' config default') |
1185 fm.plain('\n') | 1205 fm.plain(b'\n') |
1186 for fv in upgrade.allformatvariant: | 1206 for fv in upgrade.allformatvariant: |
1187 fm.startitem() | 1207 fm.startitem() |
1188 repovalue = fv.fromrepo(repo) | 1208 repovalue = fv.fromrepo(repo) |
1189 configvalue = fv.fromconfig(repo) | 1209 configvalue = fv.fromconfig(repo) |
1190 | 1210 |
1191 if repovalue != configvalue: | 1211 if repovalue != configvalue: |
1192 namelabel = 'formatvariant.name.mismatchconfig' | 1212 namelabel = b'formatvariant.name.mismatchconfig' |
1193 repolabel = 'formatvariant.repo.mismatchconfig' | 1213 repolabel = b'formatvariant.repo.mismatchconfig' |
1194 elif repovalue != fv.default: | 1214 elif repovalue != fv.default: |
1195 namelabel = 'formatvariant.name.mismatchdefault' | 1215 namelabel = b'formatvariant.name.mismatchdefault' |
1196 repolabel = 'formatvariant.repo.mismatchdefault' | 1216 repolabel = b'formatvariant.repo.mismatchdefault' |
1197 else: | 1217 else: |
1198 namelabel = 'formatvariant.name.uptodate' | 1218 namelabel = b'formatvariant.name.uptodate' |
1199 repolabel = 'formatvariant.repo.uptodate' | 1219 repolabel = b'formatvariant.repo.uptodate' |
1200 | 1220 |
1201 fm.write('name', makeformatname(fv.name), fv.name, label=namelabel) | 1221 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel) |
1202 fm.write('repo', ' %3s', formatvalue(repovalue), label=repolabel) | 1222 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel) |
1203 if fv.default != configvalue: | 1223 if fv.default != configvalue: |
1204 configlabel = 'formatvariant.config.special' | 1224 configlabel = b'formatvariant.config.special' |
1205 else: | 1225 else: |
1206 configlabel = 'formatvariant.config.default' | 1226 configlabel = b'formatvariant.config.default' |
1207 fm.condwrite( | 1227 fm.condwrite( |
1208 ui.verbose, | 1228 ui.verbose, |
1209 'config', | 1229 b'config', |
1210 ' %6s', | 1230 b' %6s', |
1211 formatvalue(configvalue), | 1231 formatvalue(configvalue), |
1212 label=configlabel, | 1232 label=configlabel, |
1213 ) | 1233 ) |
1214 fm.condwrite( | 1234 fm.condwrite( |
1215 ui.verbose, | 1235 ui.verbose, |
1216 'default', | 1236 b'default', |
1217 ' %7s', | 1237 b' %7s', |
1218 formatvalue(fv.default), | 1238 formatvalue(fv.default), |
1219 label='formatvariant.default', | 1239 label=b'formatvariant.default', |
1220 ) | 1240 ) |
1221 fm.plain('\n') | 1241 fm.plain(b'\n') |
1222 fm.end() | 1242 fm.end() |
1223 | 1243 |
1224 | 1244 |
1225 @command('debugfsinfo', [], _('[PATH]'), norepo=True) | 1245 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True) |
1226 def debugfsinfo(ui, path="."): | 1246 def debugfsinfo(ui, path=b"."): |
1227 """show information detected about current filesystem""" | 1247 """show information detected about current filesystem""" |
1228 ui.write('path: %s\n' % path) | 1248 ui.write(b'path: %s\n' % path) |
1229 ui.write('mounted on: %s\n' % (util.getfsmountpoint(path) or '(unknown)')) | 1249 ui.write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')) |
1230 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no')) | 1250 ui.write(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no')) |
1231 ui.write('fstype: %s\n' % (util.getfstype(path) or '(unknown)')) | 1251 ui.write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)')) |
1232 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no')) | 1252 ui.write(b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')) |
1233 ui.write('hardlink: %s\n' % (util.checknlink(path) and 'yes' or 'no')) | 1253 ui.write(b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')) |
1234 casesensitive = '(unknown)' | 1254 casesensitive = b'(unknown)' |
1235 try: | 1255 try: |
1236 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f: | 1256 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f: |
1237 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no' | 1257 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no' |
1238 except OSError: | 1258 except OSError: |
1239 pass | 1259 pass |
1240 ui.write('case-sensitive: %s\n' % casesensitive) | 1260 ui.write(b'case-sensitive: %s\n' % casesensitive) |
1241 | 1261 |
1242 | 1262 |
1243 @command( | 1263 @command( |
1244 'debuggetbundle', | 1264 b'debuggetbundle', |
1245 [ | 1265 [ |
1246 ('H', 'head', [], _('id of head node'), _('ID')), | 1266 (b'H', b'head', [], _(b'id of head node'), _(b'ID')), |
1247 ('C', 'common', [], _('id of common node'), _('ID')), | 1267 (b'C', b'common', [], _(b'id of common node'), _(b'ID')), |
1248 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')), | 1268 ( |
1269 b't', | |
1270 b'type', | |
1271 b'bzip2', | |
1272 _(b'bundle compression type to use'), | |
1273 _(b'TYPE'), | |
1274 ), | |
1249 ], | 1275 ], |
1250 _('REPO FILE [-H|-C ID]...'), | 1276 _(b'REPO FILE [-H|-C ID]...'), |
1251 norepo=True, | 1277 norepo=True, |
1252 ) | 1278 ) |
1253 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts): | 1279 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts): |
1254 """retrieves a bundle from a repo | 1280 """retrieves a bundle from a repo |
1255 | 1281 |
1256 Every ID must be a full-length hex node id string. Saves the bundle to the | 1282 Every ID must be a full-length hex node id string. Saves the bundle to the |
1257 given file. | 1283 given file. |
1258 """ | 1284 """ |
1259 opts = pycompat.byteskwargs(opts) | 1285 opts = pycompat.byteskwargs(opts) |
1260 repo = hg.peer(ui, opts, repopath) | 1286 repo = hg.peer(ui, opts, repopath) |
1261 if not repo.capable('getbundle'): | 1287 if not repo.capable(b'getbundle'): |
1262 raise error.Abort("getbundle() not supported by target repository") | 1288 raise error.Abort(b"getbundle() not supported by target repository") |
1263 args = {} | 1289 args = {} |
1264 if common: | 1290 if common: |
1265 args[r'common'] = [bin(s) for s in common] | 1291 args[r'common'] = [bin(s) for s in common] |
1266 if head: | 1292 if head: |
1267 args[r'heads'] = [bin(s) for s in head] | 1293 args[r'heads'] = [bin(s) for s in head] |
1268 # TODO: get desired bundlecaps from command line. | 1294 # TODO: get desired bundlecaps from command line. |
1269 args[r'bundlecaps'] = None | 1295 args[r'bundlecaps'] = None |
1270 bundle = repo.getbundle('debug', **args) | 1296 bundle = repo.getbundle(b'debug', **args) |
1271 | 1297 |
1272 bundletype = opts.get('type', 'bzip2').lower() | 1298 bundletype = opts.get(b'type', b'bzip2').lower() |
1273 btypes = { | 1299 btypes = { |
1274 'none': 'HG10UN', | 1300 b'none': b'HG10UN', |
1275 'bzip2': 'HG10BZ', | 1301 b'bzip2': b'HG10BZ', |
1276 'gzip': 'HG10GZ', | 1302 b'gzip': b'HG10GZ', |
1277 'bundle2': 'HG20', | 1303 b'bundle2': b'HG20', |
1278 } | 1304 } |
1279 bundletype = btypes.get(bundletype) | 1305 bundletype = btypes.get(bundletype) |
1280 if bundletype not in bundle2.bundletypes: | 1306 if bundletype not in bundle2.bundletypes: |
1281 raise error.Abort(_('unknown bundle type specified with --type')) | 1307 raise error.Abort(_(b'unknown bundle type specified with --type')) |
1282 bundle2.writebundle(ui, bundle, bundlepath, bundletype) | 1308 bundle2.writebundle(ui, bundle, bundlepath, bundletype) |
1283 | 1309 |
1284 | 1310 |
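The --type handling in debuggetbundle above goes through a small lookup table; a minimal sketch, assuming the name-to-format mapping shown in the hunk and simplifying the bundle2.bundletypes membership test to a None check (the helper name is hypothetical):

BTYPES = {
    b'none': b'HG10UN',
    b'bzip2': b'HG10BZ',
    b'gzip': b'HG10GZ',
    b'bundle2': b'HG20',
}

def resolve_bundletype(requested=b'bzip2'):
    # Map the user-facing --type value onto an internal bundle format name.
    bundletype = BTYPES.get(requested.lower())
    if bundletype is None:
        raise ValueError('unknown bundle type specified with --type')
    return bundletype

# resolve_bundletype(b'GZIP') -> b'HG10GZ'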
1285 @command('debugignore', [], '[FILE]') | 1311 @command(b'debugignore', [], b'[FILE]') |
1286 def debugignore(ui, repo, *files, **opts): | 1312 def debugignore(ui, repo, *files, **opts): |
1287 """display the combined ignore pattern and information about ignored files | 1313 """display the combined ignore pattern and information about ignored files |
1288 | 1314 |
1289 With no argument display the combined ignore pattern. | 1315 With no argument display the combined ignore pattern. |
1290 | 1316 |
1292 if so, show the ignore rule (file and line number) that matched it. | 1318 if so, show the ignore rule (file and line number) that matched it. |
1293 """ | 1319 """ |
1294 ignore = repo.dirstate._ignore | 1320 ignore = repo.dirstate._ignore |
1295 if not files: | 1321 if not files: |
1296 # Show all the patterns | 1322 # Show all the patterns |
1297 ui.write("%s\n" % pycompat.byterepr(ignore)) | 1323 ui.write(b"%s\n" % pycompat.byterepr(ignore)) |
1298 else: | 1324 else: |
1299 m = scmutil.match(repo[None], pats=files) | 1325 m = scmutil.match(repo[None], pats=files) |
1300 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) | 1326 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) |
1301 for f in m.files(): | 1327 for f in m.files(): |
1302 nf = util.normpath(f) | 1328 nf = util.normpath(f) |
1303 ignored = None | 1329 ignored = None |
1304 ignoredata = None | 1330 ignoredata = None |
1305 if nf != '.': | 1331 if nf != b'.': |
1306 if ignore(nf): | 1332 if ignore(nf): |
1307 ignored = nf | 1333 ignored = nf |
1308 ignoredata = repo.dirstate._ignorefileandline(nf) | 1334 ignoredata = repo.dirstate._ignorefileandline(nf) |
1309 else: | 1335 else: |
1310 for p in util.finddirs(nf): | 1336 for p in util.finddirs(nf): |
1312 ignored = p | 1338 ignored = p |
1313 ignoredata = repo.dirstate._ignorefileandline(p) | 1339 ignoredata = repo.dirstate._ignorefileandline(p) |
1314 break | 1340 break |
1315 if ignored: | 1341 if ignored: |
1316 if ignored == nf: | 1342 if ignored == nf: |
1317 ui.write(_("%s is ignored\n") % uipathfn(f)) | 1343 ui.write(_(b"%s is ignored\n") % uipathfn(f)) |
1318 else: | 1344 else: |
1319 ui.write( | 1345 ui.write( |
1320 _( | 1346 _( |
1321 "%s is ignored because of " | 1347 b"%s is ignored because of " |
1322 "containing directory %s\n" | 1348 b"containing directory %s\n" |
1323 ) | 1349 ) |
1324 % (uipathfn(f), ignored) | 1350 % (uipathfn(f), ignored) |
1325 ) | 1351 ) |
1326 ignorefile, lineno, line = ignoredata | 1352 ignorefile, lineno, line = ignoredata |
1327 ui.write( | 1353 ui.write( |
1328 _("(ignore rule in %s, line %d: '%s')\n") | 1354 _(b"(ignore rule in %s, line %d: '%s')\n") |
1329 % (ignorefile, lineno, line) | 1355 % (ignorefile, lineno, line) |
1330 ) | 1356 ) |
1331 else: | 1357 else: |
1332 ui.write(_("%s is not ignored\n") % uipathfn(f)) | 1358 ui.write(_(b"%s is not ignored\n") % uipathfn(f)) |
1333 | 1359 |
1334 | 1360 |
1335 @command( | 1361 @command( |
1336 'debugindex', | 1362 b'debugindex', |
1337 cmdutil.debugrevlogopts + cmdutil.formatteropts, | 1363 cmdutil.debugrevlogopts + cmdutil.formatteropts, |
1338 _('-c|-m|FILE'), | 1364 _(b'-c|-m|FILE'), |
1339 ) | 1365 ) |
1340 def debugindex(ui, repo, file_=None, **opts): | 1366 def debugindex(ui, repo, file_=None, **opts): |
1341 """dump index data for a storage primitive""" | 1367 """dump index data for a storage primitive""" |
1342 opts = pycompat.byteskwargs(opts) | 1368 opts = pycompat.byteskwargs(opts) |
1343 store = cmdutil.openstorage(repo, 'debugindex', file_, opts) | 1369 store = cmdutil.openstorage(repo, b'debugindex', file_, opts) |
1344 | 1370 |
1345 if ui.debugflag: | 1371 if ui.debugflag: |
1346 shortfn = hex | 1372 shortfn = hex |
1347 else: | 1373 else: |
1348 shortfn = short | 1374 shortfn = short |
1350 idlen = 12 | 1376 idlen = 12 |
1351 for i in store: | 1377 for i in store: |
1352 idlen = len(shortfn(store.node(i))) | 1378 idlen = len(shortfn(store.node(i))) |
1353 break | 1379 break |
1354 | 1380 |
1355 fm = ui.formatter('debugindex', opts) | 1381 fm = ui.formatter(b'debugindex', opts) |
1356 fm.plain( | 1382 fm.plain( |
1357 b' rev linkrev %s %s p2\n' | 1383 b' rev linkrev %s %s p2\n' |
1358 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen)) | 1384 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen)) |
1359 ) | 1385 ) |
1360 | 1386 |
1362 node = store.node(rev) | 1388 node = store.node(rev) |
1363 parents = store.parents(node) | 1389 parents = store.parents(node) |
1364 | 1390 |
1365 fm.startitem() | 1391 fm.startitem() |
1366 fm.write(b'rev', b'%6d ', rev) | 1392 fm.write(b'rev', b'%6d ', rev) |
1367 fm.write(b'linkrev', '%7d ', store.linkrev(rev)) | 1393 fm.write(b'linkrev', b'%7d ', store.linkrev(rev)) |
1368 fm.write(b'node', '%s ', shortfn(node)) | 1394 fm.write(b'node', b'%s ', shortfn(node)) |
1369 fm.write(b'p1', '%s ', shortfn(parents[0])) | 1395 fm.write(b'p1', b'%s ', shortfn(parents[0])) |
1370 fm.write(b'p2', '%s', shortfn(parents[1])) | 1396 fm.write(b'p2', b'%s', shortfn(parents[1])) |
1371 fm.plain(b'\n') | 1397 fm.plain(b'\n') |
1372 | 1398 |
1373 fm.end() | 1399 fm.end() |
1374 | 1400 |
1375 | 1401 |
1376 @command( | 1402 @command( |
1377 'debugindexdot', cmdutil.debugrevlogopts, _('-c|-m|FILE'), optionalrepo=True | 1403 b'debugindexdot', |
1404 cmdutil.debugrevlogopts, | |
1405 _(b'-c|-m|FILE'), | |
1406 optionalrepo=True, | |
1378 ) | 1407 ) |
1379 def debugindexdot(ui, repo, file_=None, **opts): | 1408 def debugindexdot(ui, repo, file_=None, **opts): |
1380 """dump an index DAG as a graphviz dot file""" | 1409 """dump an index DAG as a graphviz dot file""" |
1381 opts = pycompat.byteskwargs(opts) | 1410 opts = pycompat.byteskwargs(opts) |
1382 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts) | 1411 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts) |
1383 ui.write("digraph G {\n") | 1412 ui.write(b"digraph G {\n") |
1384 for i in r: | 1413 for i in r: |
1385 node = r.node(i) | 1414 node = r.node(i) |
1386 pp = r.parents(node) | 1415 pp = r.parents(node) |
1387 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) | 1416 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i)) |
1388 if pp[1] != nullid: | 1417 if pp[1] != nullid: |
1389 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) | 1418 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i)) |
1390 ui.write("}\n") | 1419 ui.write(b"}\n") |
1391 | 1420 |
1392 | 1421 |
1393 @command('debugindexstats', []) | 1422 @command(b'debugindexstats', []) |
1394 def debugindexstats(ui, repo): | 1423 def debugindexstats(ui, repo): |
1395 """show stats related to the changelog index""" | 1424 """show stats related to the changelog index""" |
1396 repo.changelog.shortest(nullid, 1) | 1425 repo.changelog.shortest(nullid, 1) |
1397 index = repo.changelog.index | 1426 index = repo.changelog.index |
1398 if not util.safehasattr(index, 'stats'): | 1427 if not util.safehasattr(index, b'stats'): |
1399 raise error.Abort(_('debugindexstats only works with native code')) | 1428 raise error.Abort(_(b'debugindexstats only works with native code')) |
1400 for k, v in sorted(index.stats().items()): | 1429 for k, v in sorted(index.stats().items()): |
1401 ui.write('%s: %d\n' % (k, v)) | 1430 ui.write(b'%s: %d\n' % (k, v)) |
1402 | 1431 |
1403 | 1432 |
1404 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True) | 1433 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True) |
1405 def debuginstall(ui, **opts): | 1434 def debuginstall(ui, **opts): |
1406 '''test Mercurial installation | 1435 '''test Mercurial installation |
1407 | 1436 |
1408 Returns 0 on success. | 1437 Returns 0 on success. |
1409 ''' | 1438 ''' |
1410 opts = pycompat.byteskwargs(opts) | 1439 opts = pycompat.byteskwargs(opts) |
1411 | 1440 |
1412 problems = 0 | 1441 problems = 0 |
1413 | 1442 |
1414 fm = ui.formatter('debuginstall', opts) | 1443 fm = ui.formatter(b'debuginstall', opts) |
1415 fm.startitem() | 1444 fm.startitem() |
1416 | 1445 |
1417 # encoding | 1446 # encoding |
1418 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding) | 1447 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding) |
1419 err = None | 1448 err = None |
1420 try: | 1449 try: |
1421 codecs.lookup(pycompat.sysstr(encoding.encoding)) | 1450 codecs.lookup(pycompat.sysstr(encoding.encoding)) |
1422 except LookupError as inst: | 1451 except LookupError as inst: |
1423 err = stringutil.forcebytestr(inst) | 1452 err = stringutil.forcebytestr(inst) |
1424 problems += 1 | 1453 problems += 1 |
1425 fm.condwrite( | 1454 fm.condwrite( |
1426 err, | 1455 err, |
1427 'encodingerror', | 1456 b'encodingerror', |
1428 _(" %s\n" " (check that your locale is properly set)\n"), | 1457 _(b" %s\n" b" (check that your locale is properly set)\n"), |
1429 err, | 1458 err, |
1430 ) | 1459 ) |
1431 | 1460 |
1432 # Python | 1461 # Python |
1433 fm.write( | 1462 fm.write( |
1434 'pythonexe', | 1463 b'pythonexe', |
1435 _("checking Python executable (%s)\n"), | 1464 _(b"checking Python executable (%s)\n"), |
1436 pycompat.sysexecutable or _("unknown"), | 1465 pycompat.sysexecutable or _(b"unknown"), |
1437 ) | 1466 ) |
1438 fm.write( | 1467 fm.write( |
1439 'pythonver', | 1468 b'pythonver', |
1440 _("checking Python version (%s)\n"), | 1469 _(b"checking Python version (%s)\n"), |
1441 ("%d.%d.%d" % sys.version_info[:3]), | 1470 (b"%d.%d.%d" % sys.version_info[:3]), |
1442 ) | 1471 ) |
1443 fm.write( | 1472 fm.write( |
1444 'pythonlib', | 1473 b'pythonlib', |
1445 _("checking Python lib (%s)...\n"), | 1474 _(b"checking Python lib (%s)...\n"), |
1446 os.path.dirname(pycompat.fsencode(os.__file__)), | 1475 os.path.dirname(pycompat.fsencode(os.__file__)), |
1447 ) | 1476 ) |
1448 | 1477 |
1449 security = set(sslutil.supportedprotocols) | 1478 security = set(sslutil.supportedprotocols) |
1450 if sslutil.hassni: | 1479 if sslutil.hassni: |
1451 security.add('sni') | 1480 security.add(b'sni') |
1452 | 1481 |
1453 fm.write( | 1482 fm.write( |
1454 'pythonsecurity', | 1483 b'pythonsecurity', |
1455 _("checking Python security support (%s)\n"), | 1484 _(b"checking Python security support (%s)\n"), |
1456 fm.formatlist(sorted(security), name='protocol', fmt='%s', sep=','), | 1485 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','), |
1457 ) | 1486 ) |
1458 | 1487 |
1459 # These are warnings, not errors. So don't increment problem count. This | 1488 # These are warnings, not errors. So don't increment problem count. This |
1460 # may change in the future. | 1489 # may change in the future. |
1461 if 'tls1.2' not in security: | 1490 if b'tls1.2' not in security: |
1462 fm.plain( | 1491 fm.plain( |
1463 _( | 1492 _( |
1464 ' TLS 1.2 not supported by Python install; ' | 1493 b' TLS 1.2 not supported by Python install; ' |
1465 'network connections lack modern security\n' | 1494 b'network connections lack modern security\n' |
1466 ) | 1495 ) |
1467 ) | 1496 ) |
1468 if 'sni' not in security: | 1497 if b'sni' not in security: |
1469 fm.plain( | 1498 fm.plain( |
1470 _( | 1499 _( |
1471 ' SNI not supported by Python install; may have ' | 1500 b' SNI not supported by Python install; may have ' |
1472 'connectivity issues with some servers\n' | 1501 b'connectivity issues with some servers\n' |
1473 ) | 1502 ) |
1474 ) | 1503 ) |
1475 | 1504 |
1476 # TODO print CA cert info | 1505 # TODO print CA cert info |
1477 | 1506 |
1478 # hg version | 1507 # hg version |
1479 hgver = util.version() | 1508 hgver = util.version() |
1480 fm.write( | 1509 fm.write( |
1481 'hgver', _("checking Mercurial version (%s)\n"), hgver.split('+')[0] | 1510 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0] |
1482 ) | 1511 ) |
1483 fm.write( | 1512 fm.write( |
1484 'hgverextra', | 1513 b'hgverextra', |
1485 _("checking Mercurial custom build (%s)\n"), | 1514 _(b"checking Mercurial custom build (%s)\n"), |
1486 '+'.join(hgver.split('+')[1:]), | 1515 b'+'.join(hgver.split(b'+')[1:]), |
1487 ) | 1516 ) |
1488 | 1517 |
1489 # compiled modules | 1518 # compiled modules |
1490 fm.write( | 1519 fm.write( |
1491 'hgmodulepolicy', _("checking module policy (%s)\n"), policy.policy | 1520 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy |
1492 ) | 1521 ) |
1493 fm.write( | 1522 fm.write( |
1494 'hgmodules', | 1523 b'hgmodules', |
1495 _("checking installed modules (%s)...\n"), | 1524 _(b"checking installed modules (%s)...\n"), |
1496 os.path.dirname(pycompat.fsencode(__file__)), | 1525 os.path.dirname(pycompat.fsencode(__file__)), |
1497 ) | 1526 ) |
1498 | 1527 |
1499 rustandc = policy.policy in ('rust+c', 'rust+c-allow') | 1528 rustandc = policy.policy in (b'rust+c', b'rust+c-allow') |
1500 rustext = rustandc # for now, that's the only case | 1529 rustext = rustandc # for now, that's the only case |
1501 cext = policy.policy in ('c', 'allow') or rustandc | 1530 cext = policy.policy in (b'c', b'allow') or rustandc |
1502 nopure = cext or rustext | 1531 nopure = cext or rustext |
1503 if nopure: | 1532 if nopure: |
1504 err = None | 1533 err = None |
1505 try: | 1534 try: |
1506 if cext: | 1535 if cext: |
1521 | 1550 |
1522 dir(ancestor), dir(dirstate) # quiet pyflakes | 1551 dir(ancestor), dir(dirstate) # quiet pyflakes |
1523 except Exception as inst: | 1552 except Exception as inst: |
1524 err = stringutil.forcebytestr(inst) | 1553 err = stringutil.forcebytestr(inst) |
1525 problems += 1 | 1554 problems += 1 |
1526 fm.condwrite(err, 'extensionserror', " %s\n", err) | 1555 fm.condwrite(err, b'extensionserror', b" %s\n", err) |
1527 | 1556 |
1528 compengines = util.compengines._engines.values() | 1557 compengines = util.compengines._engines.values() |
1529 fm.write( | 1558 fm.write( |
1530 'compengines', | 1559 b'compengines', |
1531 _('checking registered compression engines (%s)\n'), | 1560 _(b'checking registered compression engines (%s)\n'), |
1532 fm.formatlist( | 1561 fm.formatlist( |
1533 sorted(e.name() for e in compengines), | 1562 sorted(e.name() for e in compengines), |
1534 name='compengine', | 1563 name=b'compengine', |
1535 fmt='%s', | 1564 fmt=b'%s', |
1536 sep=', ', | 1565 sep=b', ', |
1537 ), | 1566 ), |
1538 ) | 1567 ) |
1539 fm.write( | 1568 fm.write( |
1540 'compenginesavail', | 1569 b'compenginesavail', |
1541 _('checking available compression engines ' '(%s)\n'), | 1570 _(b'checking available compression engines ' b'(%s)\n'), |
1542 fm.formatlist( | 1571 fm.formatlist( |
1543 sorted(e.name() for e in compengines if e.available()), | 1572 sorted(e.name() for e in compengines if e.available()), |
1544 name='compengine', | 1573 name=b'compengine', |
1545 fmt='%s', | 1574 fmt=b'%s', |
1546 sep=', ', | 1575 sep=b', ', |
1547 ), | 1576 ), |
1548 ) | 1577 ) |
1549 wirecompengines = compression.compengines.supportedwireengines( | 1578 wirecompengines = compression.compengines.supportedwireengines( |
1550 compression.SERVERROLE | 1579 compression.SERVERROLE |
1551 ) | 1580 ) |
1552 fm.write( | 1581 fm.write( |
1553 'compenginesserver', | 1582 b'compenginesserver', |
1554 _('checking available compression engines ' 'for wire protocol (%s)\n'), | 1583 _( |
1584 b'checking available compression engines ' | |
1585 b'for wire protocol (%s)\n' | |
1586 ), | |
1555 fm.formatlist( | 1587 fm.formatlist( |
1556 [e.name() for e in wirecompengines if e.wireprotosupport()], | 1588 [e.name() for e in wirecompengines if e.wireprotosupport()], |
1557 name='compengine', | 1589 name=b'compengine', |
1558 fmt='%s', | 1590 fmt=b'%s', |
1559 sep=', ', | 1591 sep=b', ', |
1560 ), | 1592 ), |
1561 ) | 1593 ) |
1562 re2 = 'missing' | 1594 re2 = b'missing' |
1563 if util._re2: | 1595 if util._re2: |
1564 re2 = 'available' | 1596 re2 = b'available' |
1565 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2) | 1597 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2) |
1566 fm.data(re2=bool(util._re2)) | 1598 fm.data(re2=bool(util._re2)) |
1567 | 1599 |
1568 # templates | 1600 # templates |
1569 p = templater.templatepaths() | 1601 p = templater.templatepaths() |
1570 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p)) | 1602 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p)) |
1571 fm.condwrite(not p, '', _(" no template directories found\n")) | 1603 fm.condwrite(not p, b'', _(b" no template directories found\n")) |
1572 if p: | 1604 if p: |
1573 m = templater.templatepath("map-cmdline.default") | 1605 m = templater.templatepath(b"map-cmdline.default") |
1574 if m: | 1606 if m: |
1575 # template found, check if it is working | 1607 # template found, check if it is working |
1576 err = None | 1608 err = None |
1577 try: | 1609 try: |
1578 templater.templater.frommapfile(m) | 1610 templater.templater.frommapfile(m) |
1579 except Exception as inst: | 1611 except Exception as inst: |
1580 err = stringutil.forcebytestr(inst) | 1612 err = stringutil.forcebytestr(inst) |
1581 p = None | 1613 p = None |
1582 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err) | 1614 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err) |
1583 else: | 1615 else: |
1584 p = None | 1616 p = None |
1585 fm.condwrite( | 1617 fm.condwrite( |
1586 p, 'defaulttemplate', _("checking default template (%s)\n"), m | 1618 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m |
1587 ) | 1619 ) |
1588 fm.condwrite( | 1620 fm.condwrite( |
1589 not m, | 1621 not m, |
1590 'defaulttemplatenotfound', | 1622 b'defaulttemplatenotfound', |
1591 _(" template '%s' not found\n"), | 1623 _(b" template '%s' not found\n"), |
1592 "default", | 1624 b"default", |
1593 ) | 1625 ) |
1594 if not p: | 1626 if not p: |
1595 problems += 1 | 1627 problems += 1 |
1596 fm.condwrite( | 1628 fm.condwrite( |
1597 not p, '', _(" (templates seem to have been installed incorrectly)\n") | 1629 not p, b'', _(b" (templates seem to have been installed incorrectly)\n") |
1598 ) | 1630 ) |
1599 | 1631 |
1600 # editor | 1632 # editor |
1601 editor = ui.geteditor() | 1633 editor = ui.geteditor() |
1602 editor = util.expandpath(editor) | 1634 editor = util.expandpath(editor) |
1603 editorbin = procutil.shellsplit(editor)[0] | 1635 editorbin = procutil.shellsplit(editor)[0] |
1604 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin) | 1636 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin) |
1605 cmdpath = procutil.findexe(editorbin) | 1637 cmdpath = procutil.findexe(editorbin) |
1606 fm.condwrite( | 1638 fm.condwrite( |
1607 not cmdpath and editor == 'vi', | 1639 not cmdpath and editor == b'vi', |
1608 'vinotfound', | 1640 b'vinotfound', |
1609 _( | 1641 _( |
1610 " No commit editor set and can't find %s in PATH\n" | 1642 b" No commit editor set and can't find %s in PATH\n" |
1611 " (specify a commit editor in your configuration" | 1643 b" (specify a commit editor in your configuration" |
1612 " file)\n" | 1644 b" file)\n" |
1613 ), | 1645 ), |
1614 not cmdpath and editor == 'vi' and editorbin, | 1646 not cmdpath and editor == b'vi' and editorbin, |
1615 ) | 1647 ) |
1616 fm.condwrite( | 1648 fm.condwrite( |
1617 not cmdpath and editor != 'vi', | 1649 not cmdpath and editor != b'vi', |
1618 'editornotfound', | 1650 b'editornotfound', |
1619 _( | 1651 _( |
1620 " Can't find editor '%s' in PATH\n" | 1652 b" Can't find editor '%s' in PATH\n" |
1621 " (specify a commit editor in your configuration" | 1653 b" (specify a commit editor in your configuration" |
1622 " file)\n" | 1654 b" file)\n" |
1623 ), | 1655 ), |
1624 not cmdpath and editorbin, | 1656 not cmdpath and editorbin, |
1625 ) | 1657 ) |
1626 if not cmdpath and editor != 'vi': | 1658 if not cmdpath and editor != b'vi': |
1627 problems += 1 | 1659 problems += 1 |
1628 | 1660 |
1629 # check username | 1661 # check username |
1630 username = None | 1662 username = None |
1631 err = None | 1663 err = None |
1633 username = ui.username() | 1665 username = ui.username() |
1634 except error.Abort as e: | 1666 except error.Abort as e: |
1635 err = stringutil.forcebytestr(e) | 1667 err = stringutil.forcebytestr(e) |
1636 problems += 1 | 1668 problems += 1 |
1637 | 1669 |
1638 fm.condwrite(username, 'username', _("checking username (%s)\n"), username) | 1670 fm.condwrite( |
1671 username, b'username', _(b"checking username (%s)\n"), username | |
1672 ) | |
1639 fm.condwrite( | 1673 fm.condwrite( |
1640 err, | 1674 err, |
1641 'usernameerror', | 1675 b'usernameerror', |
1642 _( | 1676 _( |
1643 "checking username...\n %s\n" | 1677 b"checking username...\n %s\n" |
1644 " (specify a username in your configuration file)\n" | 1678 b" (specify a username in your configuration file)\n" |
1645 ), | 1679 ), |
1646 err, | 1680 err, |
1647 ) | 1681 ) |
1648 | 1682 |
1649 for name, mod in extensions.extensions(): | 1683 for name, mod in extensions.extensions(): |
1650 handler = getattr(mod, 'debuginstall', None) | 1684 handler = getattr(mod, 'debuginstall', None) |
1651 if handler is not None: | 1685 if handler is not None: |
1652 problems += handler(ui, fm) | 1686 problems += handler(ui, fm) |
1653 | 1687 |
1654 fm.condwrite(not problems, '', _("no problems detected\n")) | 1688 fm.condwrite(not problems, b'', _(b"no problems detected\n")) |
1655 if not problems: | 1689 if not problems: |
1656 fm.data(problems=problems) | 1690 fm.data(problems=problems) |
1657 fm.condwrite( | 1691 fm.condwrite( |
1658 problems, | 1692 problems, |
1659 'problems', | 1693 b'problems', |
1660 _("%d problems detected," " please check your install!\n"), | 1694 _(b"%d problems detected," b" please check your install!\n"), |
1661 problems, | 1695 problems, |
1662 ) | 1696 ) |
1663 fm.end() | 1697 fm.end() |
1664 | 1698 |
1665 return problems | 1699 return problems |
1666 | 1700 |
1667 | 1701 |
1668 @command('debugknown', [], _('REPO ID...'), norepo=True) | 1702 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True) |
1669 def debugknown(ui, repopath, *ids, **opts): | 1703 def debugknown(ui, repopath, *ids, **opts): |
1670 """test whether node ids are known to a repo | 1704 """test whether node ids are known to a repo |
1671 | 1705 |
1672 Every ID must be a full-length hex node id string. Returns a list of 0s | 1706 Every ID must be a full-length hex node id string. Returns a list of 0s |
1673 and 1s indicating unknown/known. | 1707 and 1s indicating unknown/known. |
1674 """ | 1708 """ |
1675 opts = pycompat.byteskwargs(opts) | 1709 opts = pycompat.byteskwargs(opts) |
1676 repo = hg.peer(ui, opts, repopath) | 1710 repo = hg.peer(ui, opts, repopath) |
1677 if not repo.capable('known'): | 1711 if not repo.capable(b'known'): |
1678 raise error.Abort("known() not supported by target repository") | 1712 raise error.Abort(b"known() not supported by target repository") |
1679 flags = repo.known([bin(s) for s in ids]) | 1713 flags = repo.known([bin(s) for s in ids]) |
1680 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags]))) | 1714 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags]))) |
1681 | 1715 |
1682 | 1716 |
1683 @command('debuglabelcomplete', [], _('LABEL...')) | 1717 @command(b'debuglabelcomplete', [], _(b'LABEL...')) |
1684 def debuglabelcomplete(ui, repo, *args): | 1718 def debuglabelcomplete(ui, repo, *args): |
1685 '''backwards compatibility with old bash completion scripts (DEPRECATED)''' | 1719 '''backwards compatibility with old bash completion scripts (DEPRECATED)''' |
1686 debugnamecomplete(ui, repo, *args) | 1720 debugnamecomplete(ui, repo, *args) |
1687 | 1721 |
1688 | 1722 |
1689 @command( | 1723 @command( |
1690 'debuglocks', | 1724 b'debuglocks', |
1691 [ | 1725 [ |
1692 ('L', 'force-lock', None, _('free the store lock (DANGEROUS)')), | 1726 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')), |
1693 ( | 1727 ( |
1694 'W', | 1728 b'W', |
1695 'force-wlock', | 1729 b'force-wlock', |
1696 None, | 1730 None, |
1697 _('free the working state lock (DANGEROUS)'), | 1731 _(b'free the working state lock (DANGEROUS)'), |
1698 ), | 1732 ), |
1699 ('s', 'set-lock', None, _('set the store lock until stopped')), | 1733 (b's', b'set-lock', None, _(b'set the store lock until stopped')), |
1700 ('S', 'set-wlock', None, _('set the working state lock until stopped')), | 1734 ( |
1735 b'S', | |
1736 b'set-wlock', | |
1737 None, | |
1738 _(b'set the working state lock until stopped'), | |
1739 ), | |
1701 ], | 1740 ], |
1702 _('[OPTION]...'), | 1741 _(b'[OPTION]...'), |
1703 ) | 1742 ) |
1704 def debuglocks(ui, repo, **opts): | 1743 def debuglocks(ui, repo, **opts): |
1705 """show or modify state of locks | 1744 """show or modify state of locks |
1706 | 1745 |
1707 By default, this command will show which locks are held. This | 1746 By default, this command will show which locks are held. This |
1725 Returns 0 if no locks are held. | 1764 Returns 0 if no locks are held. |
1726 | 1765 |
1727 """ | 1766 """ |
1728 | 1767 |
1729 if opts.get(r'force_lock'): | 1768 if opts.get(r'force_lock'): |
1730 repo.svfs.unlink('lock') | 1769 repo.svfs.unlink(b'lock') |
1731 if opts.get(r'force_wlock'): | 1770 if opts.get(r'force_wlock'): |
1732 repo.vfs.unlink('wlock') | 1771 repo.vfs.unlink(b'wlock') |
1733 if opts.get(r'force_lock') or opts.get(r'force_wlock'): | 1772 if opts.get(r'force_lock') or opts.get(r'force_wlock'): |
1734 return 0 | 1773 return 0 |
1735 | 1774 |
1736 locks = [] | 1775 locks = [] |
1737 try: | 1776 try: |
1738 if opts.get(r'set_wlock'): | 1777 if opts.get(r'set_wlock'): |
1739 try: | 1778 try: |
1740 locks.append(repo.wlock(False)) | 1779 locks.append(repo.wlock(False)) |
1741 except error.LockHeld: | 1780 except error.LockHeld: |
1742 raise error.Abort(_('wlock is already held')) | 1781 raise error.Abort(_(b'wlock is already held')) |
1743 if opts.get(r'set_lock'): | 1782 if opts.get(r'set_lock'): |
1744 try: | 1783 try: |
1745 locks.append(repo.lock(False)) | 1784 locks.append(repo.lock(False)) |
1746 except error.LockHeld: | 1785 except error.LockHeld: |
1747 raise error.Abort(_('lock is already held')) | 1786 raise error.Abort(_(b'lock is already held')) |
1748 if len(locks): | 1787 if len(locks): |
1749 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes")) | 1788 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes")) |
1750 return 0 | 1789 return 0 |
1751 finally: | 1790 finally: |
1752 release(*locks) | 1791 release(*locks) |
1753 | 1792 |
1754 now = time.time() | 1793 now = time.time() |
1767 try: | 1806 try: |
1768 st = vfs.lstat(name) | 1807 st = vfs.lstat(name) |
1769 age = now - st[stat.ST_MTIME] | 1808 age = now - st[stat.ST_MTIME] |
1770 user = util.username(st.st_uid) | 1809 user = util.username(st.st_uid) |
1771 locker = vfs.readlock(name) | 1810 locker = vfs.readlock(name) |
1772 if ":" in locker: | 1811 if b":" in locker: |
1773 host, pid = locker.split(':') | 1812 host, pid = locker.split(b':') |
1774 if host == socket.gethostname(): | 1813 if host == socket.gethostname(): |
1775 locker = 'user %s, process %s' % (user or b'None', pid) | 1814 locker = b'user %s, process %s' % (user or b'None', pid) |
1776 else: | 1815 else: |
1777 locker = 'user %s, process %s, host %s' % ( | 1816 locker = b'user %s, process %s, host %s' % ( |
1778 user or b'None', | 1817 user or b'None', |
1779 pid, | 1818 pid, |
1780 host, | 1819 host, |
1781 ) | 1820 ) |
1782 ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age)) | 1821 ui.write(b"%-6s %s (%ds)\n" % (name + b":", locker, age)) |
1783 return 1 | 1822 return 1 |
1784 except OSError as e: | 1823 except OSError as e: |
1785 if e.errno != errno.ENOENT: | 1824 if e.errno != errno.ENOENT: |
1786 raise | 1825 raise |
1787 | 1826 |
1788 ui.write("%-6s free\n" % (name + ":")) | 1827 ui.write(b"%-6s free\n" % (name + b":")) |
1789 return 0 | 1828 return 0 |
1790 | 1829 |
1791 held += report(repo.svfs, "lock", repo.lock) | 1830 held += report(repo.svfs, b"lock", repo.lock) |
1792 held += report(repo.vfs, "wlock", repo.wlock) | 1831 held += report(repo.vfs, b"wlock", repo.wlock) |
1793 | 1832 |
1794 return held | 1833 return held |
1795 | 1834 |
1796 | 1835 |
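The locker string handled in debuglocks above is the raw "host:pid" value read back from the lock file; a minimal sketch of the same rendering, assuming a hypothetical helper name and an explicit hostname encode so the bytes comparison is self-contained:

import socket

def describe_locker(locker, user=b'None'):
    # Turn the raw "host:pid" lock contents into a human-readable note.
    if b":" not in locker:
        return locker
    host, pid = locker.split(b":", 1)
    if host == socket.gethostname().encode('utf-8'):
        return b'user %s, process %s' % (user, pid)
    return b'user %s, process %s, host %s' % (user, pid, host)

# Assuming the local hostname is not 'otherhost':
# describe_locker(b'otherhost:4242') -> b'user None, process 4242, host otherhost'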
1797 @command( | 1836 @command( |
1798 'debugmanifestfulltextcache', | 1837 b'debugmanifestfulltextcache', |
1799 [ | 1838 [ |
1800 ('', 'clear', False, _('clear the cache')), | 1839 (b'', b'clear', False, _(b'clear the cache')), |
1801 ( | 1840 ( |
1802 'a', | 1841 b'a', |
1803 'add', | 1842 b'add', |
1804 [], | 1843 [], |
1805 _('add the given manifest nodes to the cache'), | 1844 _(b'add the given manifest nodes to the cache'), |
1806 _('NODE'), | 1845 _(b'NODE'), |
1807 ), | 1846 ), |
1808 ], | 1847 ], |
1809 '', | 1848 b'', |
1810 ) | 1849 ) |
1811 def debugmanifestfulltextcache(ui, repo, add=(), **opts): | 1850 def debugmanifestfulltextcache(ui, repo, add=(), **opts): |
1812 """show, clear or amend the contents of the manifest fulltext cache""" | 1851 """show, clear or amend the contents of the manifest fulltext cache""" |
1813 | 1852 |
1814 def getcache(): | 1853 def getcache(): |
1815 r = repo.manifestlog.getstorage(b'') | 1854 r = repo.manifestlog.getstorage(b'') |
1816 try: | 1855 try: |
1817 return r._fulltextcache | 1856 return r._fulltextcache |
1818 except AttributeError: | 1857 except AttributeError: |
1819 msg = _( | 1858 msg = _( |
1820 "Current revlog implementation doesn't appear to have a " | 1859 b"Current revlog implementation doesn't appear to have a " |
1821 "manifest fulltext cache\n" | 1860 b"manifest fulltext cache\n" |
1822 ) | 1861 ) |
1823 raise error.Abort(msg) | 1862 raise error.Abort(msg) |
1824 | 1863 |
1825 if opts.get(r'clear'): | 1864 if opts.get(r'clear'): |
1826 with repo.wlock(): | 1865 with repo.wlock(): |
1834 store = m.getstorage(b'') | 1873 store = m.getstorage(b'') |
1835 for n in add: | 1874 for n in add: |
1836 try: | 1875 try: |
1837 manifest = m[store.lookup(n)] | 1876 manifest = m[store.lookup(n)] |
1838 except error.LookupError as e: | 1877 except error.LookupError as e: |
1839 raise error.Abort(e, hint="Check your manifest node id") | 1878 raise error.Abort(e, hint=b"Check your manifest node id") |
1840 manifest.read() # stores revision in cache too | 1879 manifest.read() # stores revision in cache too |
1841 return | 1880 return |
1842 | 1881 |
1843 cache = getcache() | 1882 cache = getcache() |
1844 if not len(cache): | 1883 if not len(cache): |
1845 ui.write(_('cache empty\n')) | 1884 ui.write(_(b'cache empty\n')) |
1846 else: | 1885 else: |
1847 ui.write( | 1886 ui.write( |
1848 _( | 1887 _( |
1849 'cache contains %d manifest entries, in order of most to ' | 1888 b'cache contains %d manifest entries, in order of most to ' |
1850 'least recent:\n' | 1889 b'least recent:\n' |
1851 ) | 1890 ) |
1852 % (len(cache),) | 1891 % (len(cache),) |
1853 ) | 1892 ) |
1854 totalsize = 0 | 1893 totalsize = 0 |
1855 for nodeid in cache: | 1894 for nodeid in cache: |
1856 # Use cache.get to not update the LRU order | 1895 # Use cache.get to not update the LRU order |
1857 data = cache.peek(nodeid) | 1896 data = cache.peek(nodeid) |
1858 size = len(data) | 1897 size = len(data) |
1859 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size | 1898 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size |
1860 ui.write( | 1899 ui.write( |
1861 _('id: %s, size %s\n') % (hex(nodeid), util.bytecount(size)) | 1900 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size)) |
1862 ) | 1901 ) |
1863 ondisk = cache._opener.stat('manifestfulltextcache').st_size | 1902 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size |
1864 ui.write( | 1903 ui.write( |
1865 _('total cache data size %s, on-disk %s\n') | 1904 _(b'total cache data size %s, on-disk %s\n') |
1866 % (util.bytecount(totalsize), util.bytecount(ondisk)) | 1905 % (util.bytecount(totalsize), util.bytecount(ondisk)) |
1867 ) | 1906 ) |
1868 | 1907 |
1869 | 1908 |
1870 @command('debugmergestate', [], '') | 1909 @command(b'debugmergestate', [], b'') |
1871 def debugmergestate(ui, repo, *args): | 1910 def debugmergestate(ui, repo, *args): |
1872 """print merge state | 1911 """print merge state |
1873 | 1912 |
1874 Use --verbose to print out information about whether v1 or v2 merge state | 1913 Use --verbose to print out information about whether v1 or v2 merge state |
1875 was chosen.""" | 1914 was chosen.""" |
1876 | 1915 |
1877 def _hashornull(h): | 1916 def _hashornull(h): |
1878 if h == nullhex: | 1917 if h == nullhex: |
1879 return 'null' | 1918 return b'null' |
1880 else: | 1919 else: |
1881 return h | 1920 return h |
1882 | 1921 |
1883 def printrecords(version): | 1922 def printrecords(version): |
1884 ui.write('* version %d records\n' % version) | 1923 ui.write(b'* version %d records\n' % version) |
1885 if version == 1: | 1924 if version == 1: |
1886 records = v1records | 1925 records = v1records |
1887 else: | 1926 else: |
1888 records = v2records | 1927 records = v2records |
1889 | 1928 |
1890 for rtype, record in records: | 1929 for rtype, record in records: |
1891 # pretty print some record types | 1930 # pretty print some record types |
1892 if rtype == 'L': | 1931 if rtype == b'L': |
1893 ui.write('local: %s\n' % record) | 1932 ui.write(b'local: %s\n' % record) |
1894 elif rtype == 'O': | 1933 elif rtype == b'O': |
1895 ui.write('other: %s\n' % record) | 1934 ui.write(b'other: %s\n' % record) |
1896 elif rtype == 'm': | 1935 elif rtype == b'm': |
1897 driver, mdstate = record.split('\0', 1) | 1936 driver, mdstate = record.split(b'\0', 1) |
1898 ui.write('merge driver: %s (state "%s")\n' % (driver, mdstate)) | 1937 ui.write(b'merge driver: %s (state "%s")\n' % (driver, mdstate)) |
1899 elif rtype in 'FDC': | 1938 elif rtype in b'FDC': |
1900 r = record.split('\0') | 1939 r = record.split(b'\0') |
1901 f, state, hash, lfile, afile, anode, ofile = r[0:7] | 1940 f, state, hash, lfile, afile, anode, ofile = r[0:7] |
1902 if version == 1: | 1941 if version == 1: |
1903 onode = 'not stored in v1 format' | 1942 onode = b'not stored in v1 format' |
1904 flags = r[7] | 1943 flags = r[7] |
1905 else: | 1944 else: |
1906 onode, flags = r[7:9] | 1945 onode, flags = r[7:9] |
1907 ui.write( | 1946 ui.write( |
1908 'file: %s (record type "%s", state "%s", hash %s)\n' | 1947 b'file: %s (record type "%s", state "%s", hash %s)\n' |
1909 % (f, rtype, state, _hashornull(hash)) | 1948 % (f, rtype, state, _hashornull(hash)) |
1910 ) | 1949 ) |
1911 ui.write(' local path: %s (flags "%s")\n' % (lfile, flags)) | 1950 ui.write(b' local path: %s (flags "%s")\n' % (lfile, flags)) |
1912 ui.write( | 1951 ui.write( |
1913 ' ancestor path: %s (node %s)\n' | 1952 b' ancestor path: %s (node %s)\n' |
1914 % (afile, _hashornull(anode)) | 1953 % (afile, _hashornull(anode)) |
1915 ) | 1954 ) |
1916 ui.write( | 1955 ui.write( |
1917 ' other path: %s (node %s)\n' % (ofile, _hashornull(onode)) | 1956 b' other path: %s (node %s)\n' |
1957 % (ofile, _hashornull(onode)) | |
1918 ) | 1958 ) |
1919 elif rtype == 'f': | 1959 elif rtype == b'f': |
1920 filename, rawextras = record.split('\0', 1) | 1960 filename, rawextras = record.split(b'\0', 1) |
1921 extras = rawextras.split('\0') | 1961 extras = rawextras.split(b'\0') |
1922 i = 0 | 1962 i = 0 |
1923 extrastrings = [] | 1963 extrastrings = [] |
1924 while i < len(extras): | 1964 while i < len(extras): |
1925 extrastrings.append('%s = %s' % (extras[i], extras[i + 1])) | 1965 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1])) |
1926 i += 2 | 1966 i += 2 |
1927 | 1967 |
1928 ui.write( | 1968 ui.write( |
1929 'file extras: %s (%s)\n' | 1969 b'file extras: %s (%s)\n' |
1930 % (filename, ', '.join(extrastrings)) | 1970 % (filename, b', '.join(extrastrings)) |
1931 ) | 1971 ) |
1932 elif rtype == 'l': | 1972 elif rtype == b'l': |
1933 labels = record.split('\0', 2) | 1973 labels = record.split(b'\0', 2) |
1934 labels = [l for l in labels if len(l) > 0] | 1974 labels = [l for l in labels if len(l) > 0] |
1935 ui.write('labels:\n') | 1975 ui.write(b'labels:\n') |
1936 ui.write((' local: %s\n' % labels[0])) | 1976 ui.write((b' local: %s\n' % labels[0])) |
1937 ui.write((' other: %s\n' % labels[1])) | 1977 ui.write((b' other: %s\n' % labels[1])) |
1938 if len(labels) > 2: | 1978 if len(labels) > 2: |
1939 ui.write((' base: %s\n' % labels[2])) | 1979 ui.write((b' base: %s\n' % labels[2])) |
1940 else: | 1980 else: |
1941 ui.write( | 1981 ui.write( |
1942 'unrecognized entry: %s\t%s\n' | 1982 b'unrecognized entry: %s\t%s\n' |
1943 % (rtype, record.replace('\0', '\t')) | 1983 % (rtype, record.replace(b'\0', b'\t')) |
1944 ) | 1984 ) |
1945 | 1985 |
1946 # Avoid mergestate.read() since it may raise an exception for unsupported | 1986 # Avoid mergestate.read() since it may raise an exception for unsupported |
1947 # merge state records. We shouldn't be doing this, but this is OK since this | 1987 # merge state records. We shouldn't be doing this, but this is OK since this |
1948 # command is pretty low-level. | 1988 # command is pretty low-level. |
1949 ms = mergemod.mergestate(repo) | 1989 ms = mergemod.mergestate(repo) |
1950 | 1990 |
1951 # sort so that reasonable information is on top | 1991 # sort so that reasonable information is on top |
1952 v1records = ms._readrecordsv1() | 1992 v1records = ms._readrecordsv1() |
1953 v2records = ms._readrecordsv2() | 1993 v2records = ms._readrecordsv2() |
1954 order = 'LOml' | 1994 order = b'LOml' |
1955 | 1995 |
1956 def key(r): | 1996 def key(r): |
1957 idx = order.find(r[0]) | 1997 idx = order.find(r[0]) |
1958 if idx == -1: | 1998 if idx == -1: |
1959 return (1, r[1]) | 1999 return (1, r[1]) |
1962 | 2002 |
1963 v1records.sort(key=key) | 2003 v1records.sort(key=key) |
1964 v2records.sort(key=key) | 2004 v2records.sort(key=key) |
1965 | 2005 |
1966 if not v1records and not v2records: | 2006 if not v1records and not v2records: |
1967 ui.write('no merge state found\n') | 2007 ui.write(b'no merge state found\n') |
1968 elif not v2records: | 2008 elif not v2records: |
1969 ui.note('no version 2 merge state\n') | 2009 ui.note(b'no version 2 merge state\n') |
1970 printrecords(1) | 2010 printrecords(1) |
1971 elif ms._v1v2match(v1records, v2records): | 2011 elif ms._v1v2match(v1records, v2records): |
1972 ui.note('v1 and v2 states match: using v2\n') | 2012 ui.note(b'v1 and v2 states match: using v2\n') |
1973 printrecords(2) | 2013 printrecords(2) |
1974 else: | 2014 else: |
1975 ui.note('v1 and v2 states mismatch: using v1\n') | 2015 ui.note(b'v1 and v2 states mismatch: using v1\n') |
1976 printrecords(1) | 2016 printrecords(1) |
1977 if ui.verbose: | 2017 if ui.verbose: |
1978 printrecords(2) | 2018 printrecords(2) |
1979 | 2019 |
1980 | 2020 |
1981 @command('debugnamecomplete', [], _('NAME...')) | 2021 @command(b'debugnamecomplete', [], _(b'NAME...')) |
1982 def debugnamecomplete(ui, repo, *args): | 2022 def debugnamecomplete(ui, repo, *args): |
1983 '''complete "names" - tags, open branch names, bookmark names''' | 2023 '''complete "names" - tags, open branch names, bookmark names''' |
1984 | 2024 |
1985 names = set() | 2025 names = set() |
1986 # since we previously only listed open branches, we will handle that | 2026 # since we previously only listed open branches, we will handle that |
1987 # specially (after this for loop) | 2027 # specially (after this for loop) |
1988 for name, ns in repo.names.iteritems(): | 2028 for name, ns in repo.names.iteritems(): |
1989 if name != 'branches': | 2029 if name != b'branches': |
1990 names.update(ns.listnames(repo)) | 2030 names.update(ns.listnames(repo)) |
1991 names.update( | 2031 names.update( |
1992 tag | 2032 tag |
1993 for (tag, heads, tip, closed) in repo.branchmap().iterbranches() | 2033 for (tag, heads, tip, closed) in repo.branchmap().iterbranches() |
1994 if not closed | 2034 if not closed |
1995 ) | 2035 ) |
1996 completions = set() | 2036 completions = set() |
1997 if not args: | 2037 if not args: |
1998 args = [''] | 2038 args = [b''] |
1999 for a in args: | 2039 for a in args: |
2000 completions.update(n for n in names if n.startswith(a)) | 2040 completions.update(n for n in names if n.startswith(a)) |
2001 ui.write('\n'.join(sorted(completions))) | 2041 ui.write(b'\n'.join(sorted(completions))) |
2002 ui.write('\n') | 2042 ui.write(b'\n') |
2003 | 2043 |
2004 | 2044 |
2005 @command( | 2045 @command( |
2006 'debugobsolete', | 2046 b'debugobsolete', |
2007 [ | 2047 [ |
2008 ('', 'flags', 0, _('markers flag')), | 2048 (b'', b'flags', 0, _(b'markers flag')), |
2009 ( | 2049 ( |
2010 '', | 2050 b'', |
2011 'record-parents', | 2051 b'record-parents', |
2012 False, | 2052 False, |
2013 _('record parent information for the precursor'), | 2053 _(b'record parent information for the precursor'), |
2014 ), | 2054 ), |
2015 ('r', 'rev', [], _('display markers relevant to REV')), | 2055 (b'r', b'rev', [], _(b'display markers relevant to REV')), |
2016 ( | 2056 ( |
2017 '', | 2057 b'', |
2018 'exclusive', | 2058 b'exclusive', |
2019 False, | 2059 False, |
2020 _('restrict display to markers only ' 'relevant to REV'), | 2060 _(b'restrict display to markers only ' b'relevant to REV'), |
2021 ), | 2061 ), |
2022 ('', 'index', False, _('display index of the marker')), | 2062 (b'', b'index', False, _(b'display index of the marker')), |
2023 ('', 'delete', [], _('delete markers specified by indices')), | 2063 (b'', b'delete', [], _(b'delete markers specified by indices')), |
2024 ] | 2064 ] |
2025 + cmdutil.commitopts2 | 2065 + cmdutil.commitopts2 |
2026 + cmdutil.formatteropts, | 2066 + cmdutil.formatteropts, |
2027 _('[OBSOLETED [REPLACEMENT ...]]'), | 2067 _(b'[OBSOLETED [REPLACEMENT ...]]'), |
2028 ) | 2068 ) |
2029 def debugobsolete(ui, repo, precursor=None, *successors, **opts): | 2069 def debugobsolete(ui, repo, precursor=None, *successors, **opts): |
2030 """create arbitrary obsolete marker | 2070 """create arbitrary obsolete marker |
2031 | 2071 |
2032 With no arguments, displays the list of obsolescence markers.""" | 2072 With no arguments, displays the list of obsolescence markers.""" |
2042 if len(n) != len(nullid): | 2082 if len(n) != len(nullid): |
2043 raise TypeError() | 2083 raise TypeError() |
2044 return n | 2084 return n |
2045 except TypeError: | 2085 except TypeError: |
2046 raise error.Abort( | 2086 raise error.Abort( |
2047 'changeset references must be full hexadecimal ' | 2087 b'changeset references must be full hexadecimal ' |
2048 'node identifiers' | 2088 b'node identifiers' |
2049 ) | 2089 ) |
2050 | 2090 |
2051 if opts.get('delete'): | 2091 if opts.get(b'delete'): |
2052 indices = [] | 2092 indices = [] |
2053 for v in opts.get('delete'): | 2093 for v in opts.get(b'delete'): |
2054 try: | 2094 try: |
2055 indices.append(int(v)) | 2095 indices.append(int(v)) |
2056 except ValueError: | 2096 except ValueError: |
2057 raise error.Abort( | 2097 raise error.Abort( |
2058 _('invalid index value: %r') % v, | 2098 _(b'invalid index value: %r') % v, |
2059 hint=_('use integers for indices'), | 2099 hint=_(b'use integers for indices'), |
2060 ) | 2100 ) |
2061 | 2101 |
2062 if repo.currenttransaction(): | 2102 if repo.currenttransaction(): |
2063 raise error.Abort( | 2103 raise error.Abort( |
2064 _('cannot delete obsmarkers in the middle ' 'of transaction.') | 2104 _(b'cannot delete obsmarkers in the middle ' b'of transaction.') |
2065 ) | 2105 ) |
2066 | 2106 |
2067 with repo.lock(): | 2107 with repo.lock(): |
2068 n = repair.deleteobsmarkers(repo.obsstore, indices) | 2108 n = repair.deleteobsmarkers(repo.obsstore, indices) |
2069 ui.write(_('deleted %i obsolescence markers\n') % n) | 2109 ui.write(_(b'deleted %i obsolescence markers\n') % n) |
2070 | 2110 |
2071 return | 2111 return |
2072 | 2112 |
2073 if precursor is not None: | 2113 if precursor is not None: |
2074 if opts['rev']: | 2114 if opts[b'rev']: |
2075 raise error.Abort('cannot select revision when creating marker') | 2115 raise error.Abort(b'cannot select revision when creating marker') |
2076 metadata = {} | 2116 metadata = {} |
2077 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username()) | 2117 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username()) |
2078 succs = tuple(parsenodeid(succ) for succ in successors) | 2118 succs = tuple(parsenodeid(succ) for succ in successors) |
2079 l = repo.lock() | 2119 l = repo.lock() |
2080 try: | 2120 try: |
2081 tr = repo.transaction('debugobsolete') | 2121 tr = repo.transaction(b'debugobsolete') |
2082 try: | 2122 try: |
2083 date = opts.get('date') | 2123 date = opts.get(b'date') |
2084 if date: | 2124 if date: |
2085 date = dateutil.parsedate(date) | 2125 date = dateutil.parsedate(date) |
2086 else: | 2126 else: |
2087 date = None | 2127 date = None |
2088 prec = parsenodeid(precursor) | 2128 prec = parsenodeid(precursor) |
2089 parents = None | 2129 parents = None |
2090 if opts['record_parents']: | 2130 if opts[b'record_parents']: |
2091 if prec not in repo.unfiltered(): | 2131 if prec not in repo.unfiltered(): |
2092 raise error.Abort( | 2132 raise error.Abort( |
2093 'cannot used --record-parents on ' | 2133 b'cannot used --record-parents on ' |
2094 'unknown changesets' | 2134 b'unknown changesets' |
2095 ) | 2135 ) |
2096 parents = repo.unfiltered()[prec].parents() | 2136 parents = repo.unfiltered()[prec].parents() |
2097 parents = tuple(p.node() for p in parents) | 2137 parents = tuple(p.node() for p in parents) |
2098 repo.obsstore.create( | 2138 repo.obsstore.create( |
2099 tr, | 2139 tr, |
2100 prec, | 2140 prec, |
2101 succs, | 2141 succs, |
2102 opts['flags'], | 2142 opts[b'flags'], |
2103 parents=parents, | 2143 parents=parents, |
2104 date=date, | 2144 date=date, |
2105 metadata=metadata, | 2145 metadata=metadata, |
2106 ui=ui, | 2146 ui=ui, |
2107 ) | 2147 ) |
2108 tr.close() | 2148 tr.close() |
2109 except ValueError as exc: | 2149 except ValueError as exc: |
2110 raise error.Abort( | 2150 raise error.Abort( |
2111 _('bad obsmarker input: %s') % pycompat.bytestr(exc) | 2151 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc) |
2112 ) | 2152 ) |
2113 finally: | 2153 finally: |
2114 tr.release() | 2154 tr.release() |
2115 finally: | 2155 finally: |
2116 l.release() | 2156 l.release() |
2117 else: | 2157 else: |
2118 if opts['rev']: | 2158 if opts[b'rev']: |
2119 revs = scmutil.revrange(repo, opts['rev']) | 2159 revs = scmutil.revrange(repo, opts[b'rev']) |
2120 nodes = [repo[r].node() for r in revs] | 2160 nodes = [repo[r].node() for r in revs] |
2121 markers = list( | 2161 markers = list( |
2122 obsutil.getmarkers( | 2162 obsutil.getmarkers( |
2123 repo, nodes=nodes, exclusive=opts['exclusive'] | 2163 repo, nodes=nodes, exclusive=opts[b'exclusive'] |
2124 ) | 2164 ) |
2125 ) | 2165 ) |
2126 markers.sort(key=lambda x: x._data) | 2166 markers.sort(key=lambda x: x._data) |
2127 else: | 2167 else: |
2128 markers = obsutil.getmarkers(repo) | 2168 markers = obsutil.getmarkers(repo) |
2129 | 2169 |
2130 markerstoiter = markers | 2170 markerstoiter = markers |
2131 isrelevant = lambda m: True | 2171 isrelevant = lambda m: True |
2132 if opts.get('rev') and opts.get('index'): | 2172 if opts.get(b'rev') and opts.get(b'index'): |
2133 markerstoiter = obsutil.getmarkers(repo) | 2173 markerstoiter = obsutil.getmarkers(repo) |
2134 markerset = set(markers) | 2174 markerset = set(markers) |
2135 isrelevant = lambda m: m in markerset | 2175 isrelevant = lambda m: m in markerset |
2136 | 2176 |
2137 fm = ui.formatter('debugobsolete', opts) | 2177 fm = ui.formatter(b'debugobsolete', opts) |
2138 for i, m in enumerate(markerstoiter): | 2178 for i, m in enumerate(markerstoiter): |
2139 if not isrelevant(m): | 2179 if not isrelevant(m): |
2140 # marker can be irrelevant when we're iterating over a set | 2180 # marker can be irrelevant when we're iterating over a set |
2141 # of markers (markerstoiter) which is bigger than the set | 2181 # of markers (markerstoiter) which is bigger than the set |
2142 # of markers we want to display (markers) | 2182 # of markers we want to display (markers) |
2144 # provided and thus we need to iterate over all of the markers | 2184 # provided and thus we need to iterate over all of the markers |
2145 # to get the correct indices, but only display the ones that | 2185 # to get the correct indices, but only display the ones that |
2146 # are relevant to --rev value | 2186 # are relevant to --rev value |
2147 continue | 2187 continue |
2148 fm.startitem() | 2188 fm.startitem() |
2149 ind = i if opts.get('index') else None | 2189 ind = i if opts.get(b'index') else None |
2150 cmdutil.showmarker(fm, m, index=ind) | 2190 cmdutil.showmarker(fm, m, index=ind) |
2151 fm.end() | 2191 fm.end() |
2152 | 2192 |
2153 | 2193 |
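The pattern repeated throughout this change - `opts = pycompat.byteskwargs(opts)` followed by lookups such as `opts[b'rev']` - exists because `**opts` arrives with native `str` keys on Python 3 while the rest of Mercurial works in bytes. A minimal, self-contained sketch of that conversion (the real helper lives in `pycompat` and uses Mercurial's own encoding routines; this stand-in only illustrates the key rewriting):

    # Hypothetical stand-in for pycompat.byteskwargs, for illustration only:
    # re-key a **kwargs dict from native str to bytes so later lookups can
    # consistently use b'...' keys, as the rewritten command bodies above do.
    def byteskwargs_sketch(kwargs):
        return {k.encode('latin-1'): v for k, v in kwargs.items()}

    opts = byteskwargs_sketch({'rev': ['.'], 'index': True})
    assert opts[b'rev'] == ['.']
    assert opts[b'index'] is True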
2154 @command( | 2194 @command( |
2155 'debugp1copies', | 2195 b'debugp1copies', |
2156 [('r', 'rev', '', _('revision to debug'), _('REV'))], | 2196 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))], |
2157 _('[-r REV]'), | 2197 _(b'[-r REV]'), |
2158 ) | 2198 ) |
2159 def debugp1copies(ui, repo, **opts): | 2199 def debugp1copies(ui, repo, **opts): |
2160 """dump copy information compared to p1""" | 2200 """dump copy information compared to p1""" |
2161 | 2201 |
2162 opts = pycompat.byteskwargs(opts) | 2202 opts = pycompat.byteskwargs(opts) |
2163 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None) | 2203 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None) |
2164 for dst, src in ctx.p1copies().items(): | 2204 for dst, src in ctx.p1copies().items(): |
2165 ui.write('%s -> %s\n' % (src, dst)) | 2205 ui.write(b'%s -> %s\n' % (src, dst)) |
2166 | 2206 |
2167 | 2207 |
2168 @command( | 2208 @command( |
2169 'debugp2copies', | 2209 b'debugp2copies', |
2170 [('r', 'rev', '', _('revision to debug'), _('REV'))], | 2210 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))], |
2171 _('[-r REV]'), | 2211 _(b'[-r REV]'), |
2172 ) | 2212 ) |
2173 def debugp1copies(ui, repo, **opts): | 2213 def debugp1copies(ui, repo, **opts): |
2174 """dump copy information compared to p2""" | 2214 """dump copy information compared to p2""" |
2175 | 2215 |
2176 opts = pycompat.byteskwargs(opts) | 2216 opts = pycompat.byteskwargs(opts) |
2177 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None) | 2217 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None) |
2178 for dst, src in ctx.p2copies().items(): | 2218 for dst, src in ctx.p2copies().items(): |
2179 ui.write('%s -> %s\n' % (src, dst)) | 2219 ui.write(b'%s -> %s\n' % (src, dst)) |
2180 | 2220 |
2181 | 2221 |
2182 @command( | 2222 @command( |
2183 'debugpathcomplete', | 2223 b'debugpathcomplete', |
2184 [ | 2224 [ |
2185 ('f', 'full', None, _('complete an entire path')), | 2225 (b'f', b'full', None, _(b'complete an entire path')), |
2186 ('n', 'normal', None, _('show only normal files')), | 2226 (b'n', b'normal', None, _(b'show only normal files')), |
2187 ('a', 'added', None, _('show only added files')), | 2227 (b'a', b'added', None, _(b'show only added files')), |
2188 ('r', 'removed', None, _('show only removed files')), | 2228 (b'r', b'removed', None, _(b'show only removed files')), |
2189 ], | 2229 ], |
2190 _('FILESPEC...'), | 2230 _(b'FILESPEC...'), |
2191 ) | 2231 ) |
2192 def debugpathcomplete(ui, repo, *specs, **opts): | 2232 def debugpathcomplete(ui, repo, *specs, **opts): |
2193 '''complete part or all of a tracked path | 2233 '''complete part or all of a tracked path |
2194 | 2234 |
2195 This command supports shells that offer path name completion. It | 2235 This command supports shells that offer path name completion. It |
2203 spec = os.path.normpath(os.path.join(encoding.getcwd(), path)) | 2243 spec = os.path.normpath(os.path.join(encoding.getcwd(), path)) |
2204 rootdir = repo.root + pycompat.ossep | 2244 rootdir = repo.root + pycompat.ossep |
2205 if spec != repo.root and not spec.startswith(rootdir): | 2245 if spec != repo.root and not spec.startswith(rootdir): |
2206 return [], [] | 2246 return [], [] |
2207 if os.path.isdir(spec): | 2247 if os.path.isdir(spec): |
2208 spec += '/' | 2248 spec += b'/' |
2209 spec = spec[len(rootdir) :] | 2249 spec = spec[len(rootdir) :] |
2210 fixpaths = pycompat.ossep != '/' | 2250 fixpaths = pycompat.ossep != b'/' |
2211 if fixpaths: | 2251 if fixpaths: |
2212 spec = spec.replace(pycompat.ossep, '/') | 2252 spec = spec.replace(pycompat.ossep, b'/') |
2213 speclen = len(spec) | 2253 speclen = len(spec) |
2214 fullpaths = opts[r'full'] | 2254 fullpaths = opts[r'full'] |
2215 files, dirs = set(), set() | 2255 files, dirs = set(), set() |
2216 adddir, addfile = dirs.add, files.add | 2256 adddir, addfile = dirs.add, files.add |
2217 for f, st in dirstate.iteritems(): | 2257 for f, st in dirstate.iteritems(): |
2218 if f.startswith(spec) and st[0] in acceptable: | 2258 if f.startswith(spec) and st[0] in acceptable: |
2219 if fixpaths: | 2259 if fixpaths: |
2220 f = f.replace('/', pycompat.ossep) | 2260 f = f.replace(b'/', pycompat.ossep) |
2221 if fullpaths: | 2261 if fullpaths: |
2222 addfile(f) | 2262 addfile(f) |
2223 continue | 2263 continue |
2224 s = f.find(pycompat.ossep, speclen) | 2264 s = f.find(pycompat.ossep, speclen) |
2225 if s >= 0: | 2265 if s >= 0: |
2226 adddir(f[:s]) | 2266 adddir(f[:s]) |
2227 else: | 2267 else: |
2228 addfile(f) | 2268 addfile(f) |
2229 return files, dirs | 2269 return files, dirs |
2230 | 2270 |
2231 acceptable = '' | 2271 acceptable = b'' |
2232 if opts[r'normal']: | 2272 if opts[r'normal']: |
2233 acceptable += 'nm' | 2273 acceptable += b'nm' |
2234 if opts[r'added']: | 2274 if opts[r'added']: |
2235 acceptable += 'a' | 2275 acceptable += b'a' |
2236 if opts[r'removed']: | 2276 if opts[r'removed']: |
2237 acceptable += 'r' | 2277 acceptable += b'r' |
2238 cwd = repo.getcwd() | 2278 cwd = repo.getcwd() |
2239 if not specs: | 2279 if not specs: |
2240 specs = ['.'] | 2280 specs = [b'.'] |
2241 | 2281 |
2242 files, dirs = set(), set() | 2282 files, dirs = set(), set() |
2243 for spec in specs: | 2283 for spec in specs: |
2244 f, d = complete(spec, acceptable or 'nmar') | 2284 f, d = complete(spec, acceptable or b'nmar') |
2245 files.update(f) | 2285 files.update(f) |
2246 dirs.update(d) | 2286 dirs.update(d) |
2247 files.update(dirs) | 2287 files.update(dirs) |
2248 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files))) | 2288 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files))) |
2249 ui.write('\n') | 2289 ui.write(b'\n') |
2250 | 2290 |
2251 | 2291 |
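The `complete()` helper in debugpathcomplete above filters dirstate entries by their one-letter state code, building the `acceptable` string from the --normal/--added/--removed flags and falling back to all states when no flag is given. A hedged, standalone sketch of just that flag-to-codes step (the state letters are taken from the code above; everything else is illustrative):

    # Sketch of the 'acceptable' computation above: map command flags to
    # dirstate state codes, defaulting to every code when nothing is set.
    def acceptable_codes(normal=False, added=False, removed=False):
        codes = b''
        if normal:
            codes += b'nm'   # 'n' and 'm' entries both count as "normal" here
        if added:
            codes += b'a'
        if removed:
            codes += b'r'
        return codes or b'nmar'

    assert acceptable_codes(added=True) == b'a'
    assert acceptable_codes() == b'nmar'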
2252 @command( | 2292 @command( |
2253 'debugpathcopies', | 2293 b'debugpathcopies', |
2254 cmdutil.walkopts, | 2294 cmdutil.walkopts, |
2255 'hg debugpathcopies REV1 REV2 [FILE]', | 2295 b'hg debugpathcopies REV1 REV2 [FILE]', |
2256 inferrepo=True, | 2296 inferrepo=True, |
2257 ) | 2297 ) |
2258 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts): | 2298 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts): |
2259 """show copies between two revisions""" | 2299 """show copies between two revisions""" |
2260 ctx1 = scmutil.revsingle(repo, rev1) | 2300 ctx1 = scmutil.revsingle(repo, rev1) |
2261 ctx2 = scmutil.revsingle(repo, rev2) | 2301 ctx2 = scmutil.revsingle(repo, rev2) |
2262 m = scmutil.match(ctx1, pats, opts) | 2302 m = scmutil.match(ctx1, pats, opts) |
2263 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()): | 2303 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()): |
2264 ui.write('%s -> %s\n' % (src, dst)) | 2304 ui.write(b'%s -> %s\n' % (src, dst)) |
2265 | 2305 |
2266 | 2306 |
2267 @command('debugpeer', [], _('PATH'), norepo=True) | 2307 @command(b'debugpeer', [], _(b'PATH'), norepo=True) |
2268 def debugpeer(ui, path): | 2308 def debugpeer(ui, path): |
2269 """establish a connection to a peer repository""" | 2309 """establish a connection to a peer repository""" |
2270 # Always enable peer request logging. Requires --debug to display | 2310 # Always enable peer request logging. Requires --debug to display |
2271 # though. | 2311 # though. |
2272 overrides = { | 2312 overrides = { |
2273 ('devel', 'debug.peer-request'): True, | 2313 (b'devel', b'debug.peer-request'): True, |
2274 } | 2314 } |
2275 | 2315 |
2276 with ui.configoverride(overrides): | 2316 with ui.configoverride(overrides): |
2277 peer = hg.peer(ui, {}, path) | 2317 peer = hg.peer(ui, {}, path) |
2278 | 2318 |
2279 local = peer.local() is not None | 2319 local = peer.local() is not None |
2280 canpush = peer.canpush() | 2320 canpush = peer.canpush() |
2281 | 2321 |
2282 ui.write(_('url: %s\n') % peer.url()) | 2322 ui.write(_(b'url: %s\n') % peer.url()) |
2283 ui.write(_('local: %s\n') % (_('yes') if local else _('no'))) | 2323 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no'))) |
2284 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no'))) | 2324 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))) |
2285 | 2325 |
2286 | 2326 |
2287 @command( | 2327 @command( |
2288 'debugpickmergetool', | 2328 b'debugpickmergetool', |
2289 [ | 2329 [ |
2290 ('r', 'rev', '', _('check for files in this revision'), _('REV')), | 2330 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')), |
2291 ('', 'changedelete', None, _('emulate merging change and delete')), | 2331 (b'', b'changedelete', None, _(b'emulate merging change and delete')), |
2292 ] | 2332 ] |
2293 + cmdutil.walkopts | 2333 + cmdutil.walkopts |
2294 + cmdutil.mergetoolopts, | 2334 + cmdutil.mergetoolopts, |
2295 _('[PATTERN]...'), | 2335 _(b'[PATTERN]...'), |
2296 inferrepo=True, | 2336 inferrepo=True, |
2297 ) | 2337 ) |
2298 def debugpickmergetool(ui, repo, *pats, **opts): | 2338 def debugpickmergetool(ui, repo, *pats, **opts): |
2299 """examine which merge tool is chosen for specified file | 2339 """examine which merge tool is chosen for specified file |
2300 | 2340 |
2337 information, even with --debug. In such a case, the information above is | 2377 information, even with --debug. In such a case, the information above is |
2338 useful to know why a merge tool is chosen. | 2378 useful to know why a merge tool is chosen. |
2339 """ | 2379 """ |
2340 opts = pycompat.byteskwargs(opts) | 2380 opts = pycompat.byteskwargs(opts) |
2341 overrides = {} | 2381 overrides = {} |
2342 if opts['tool']: | 2382 if opts[b'tool']: |
2343 overrides[('ui', 'forcemerge')] = opts['tool'] | 2383 overrides[(b'ui', b'forcemerge')] = opts[b'tool'] |
2344 ui.note('with --tool %r\n' % (pycompat.bytestr(opts['tool']))) | 2384 ui.note(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool']))) |
2345 | 2385 |
2346 with ui.configoverride(overrides, 'debugmergepatterns'): | 2386 with ui.configoverride(overrides, b'debugmergepatterns'): |
2347 hgmerge = encoding.environ.get("HGMERGE") | 2387 hgmerge = encoding.environ.get(b"HGMERGE") |
2348 if hgmerge is not None: | 2388 if hgmerge is not None: |
2349 ui.note('with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge))) | 2389 ui.note(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge))) |
2350 uimerge = ui.config("ui", "merge") | 2390 uimerge = ui.config(b"ui", b"merge") |
2351 if uimerge: | 2391 if uimerge: |
2352 ui.note('with ui.merge=%r\n' % (pycompat.bytestr(uimerge))) | 2392 ui.note(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge))) |
2353 | 2393 |
2354 ctx = scmutil.revsingle(repo, opts.get('rev')) | 2394 ctx = scmutil.revsingle(repo, opts.get(b'rev')) |
2355 m = scmutil.match(ctx, pats, opts) | 2395 m = scmutil.match(ctx, pats, opts) |
2356 changedelete = opts['changedelete'] | 2396 changedelete = opts[b'changedelete'] |
2357 for path in ctx.walk(m): | 2397 for path in ctx.walk(m): |
2358 fctx = ctx[path] | 2398 fctx = ctx[path] |
2359 try: | 2399 try: |
2360 if not ui.debugflag: | 2400 if not ui.debugflag: |
2361 ui.pushbuffer(error=True) | 2401 ui.pushbuffer(error=True) |
2362 tool, toolpath = filemerge._picktool( | 2402 tool, toolpath = filemerge._picktool( |
2363 repo, | 2403 repo, |
2364 ui, | 2404 ui, |
2365 path, | 2405 path, |
2366 fctx.isbinary(), | 2406 fctx.isbinary(), |
2367 'l' in fctx.flags(), | 2407 b'l' in fctx.flags(), |
2368 changedelete, | 2408 changedelete, |
2369 ) | 2409 ) |
2370 finally: | 2410 finally: |
2371 if not ui.debugflag: | 2411 if not ui.debugflag: |
2372 ui.popbuffer() | 2412 ui.popbuffer() |
2373 ui.write('%s = %s\n' % (path, tool)) | 2413 ui.write(b'%s = %s\n' % (path, tool)) |
2374 | 2414 |
2375 | 2415 |
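debugpickmergetool's notes above record three inputs that can steer tool selection: an explicit --tool, the HGMERGE environment variable, and the ui.merge configuration. A hedged sketch of that precedence as the surrounding output suggests it (the real decision lives in filemerge._picktool and also weighs merge-patterns, file flags, and tool availability):

    # Illustrative precedence only: --tool beats HGMERGE, which beats
    # ui.merge; the actual picker considers more factors than this.
    def pick_source(tool_opt=None, hgmerge_env=None, ui_merge_cfg=None):
        if tool_opt:
            return b'--tool', tool_opt
        if hgmerge_env is not None:
            return b'HGMERGE', hgmerge_env
        if ui_merge_cfg:
            return b'ui.merge', ui_merge_cfg
        return b'(default)', None

    assert pick_source(tool_opt=b':merge3', hgmerge_env=b'vimdiff') == (b'--tool', b':merge3')
    assert pick_source(ui_merge_cfg=b'kdiff3') == (b'ui.merge', b'kdiff3')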
2376 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True) | 2416 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True) |
2377 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts): | 2417 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts): |
2378 '''access the pushkey key/value protocol | 2418 '''access the pushkey key/value protocol |
2379 | 2419 |
2380 With two args, list the keys in the given namespace. | 2420 With two args, list the keys in the given namespace. |
2381 | 2421 |
2386 target = hg.peer(ui, {}, repopath) | 2426 target = hg.peer(ui, {}, repopath) |
2387 if keyinfo: | 2427 if keyinfo: |
2388 key, old, new = keyinfo | 2428 key, old, new = keyinfo |
2389 with target.commandexecutor() as e: | 2429 with target.commandexecutor() as e: |
2390 r = e.callcommand( | 2430 r = e.callcommand( |
2391 'pushkey', | 2431 b'pushkey', |
2392 {'namespace': namespace, 'key': key, 'old': old, 'new': new,}, | 2432 { |
2433 b'namespace': namespace, | |
2434 b'key': key, | |
2435 b'old': old, | |
2436 b'new': new, | |
2437 }, | |
2393 ).result() | 2438 ).result() |
2394 | 2439 |
2395 ui.status(pycompat.bytestr(r) + '\n') | 2440 ui.status(pycompat.bytestr(r) + b'\n') |
2396 return not r | 2441 return not r |
2397 else: | 2442 else: |
2398 for k, v in sorted(target.listkeys(namespace).iteritems()): | 2443 for k, v in sorted(target.listkeys(namespace).iteritems()): |
2399 ui.write( | 2444 ui.write( |
2400 "%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v)) | 2445 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v)) |
2401 ) | 2446 ) |
2402 | 2447 |
2403 | 2448 |
2404 @command('debugpvec', [], _('A B')) | 2449 @command(b'debugpvec', [], _(b'A B')) |
2405 def debugpvec(ui, repo, a, b=None): | 2450 def debugpvec(ui, repo, a, b=None): |
2406 ca = scmutil.revsingle(repo, a) | 2451 ca = scmutil.revsingle(repo, a) |
2407 cb = scmutil.revsingle(repo, b) | 2452 cb = scmutil.revsingle(repo, b) |
2408 pa = pvec.ctxpvec(ca) | 2453 pa = pvec.ctxpvec(ca) |
2409 pb = pvec.ctxpvec(cb) | 2454 pb = pvec.ctxpvec(cb) |
2410 if pa == pb: | 2455 if pa == pb: |
2411 rel = "=" | 2456 rel = b"=" |
2412 elif pa > pb: | 2457 elif pa > pb: |
2413 rel = ">" | 2458 rel = b">" |
2414 elif pa < pb: | 2459 elif pa < pb: |
2415 rel = "<" | 2460 rel = b"<" |
2416 elif pa | pb: | 2461 elif pa | pb: |
2417 rel = "|" | 2462 rel = b"|" |
2418 ui.write(_("a: %s\n") % pa) | 2463 ui.write(_(b"a: %s\n") % pa) |
2419 ui.write(_("b: %s\n") % pb) | 2464 ui.write(_(b"b: %s\n") % pb) |
2420 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth)) | 2465 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth)) |
2421 ui.write( | 2466 ui.write( |
2422 _("delta: %d hdist: %d distance: %d relation: %s\n") | 2467 _(b"delta: %d hdist: %d distance: %d relation: %s\n") |
2423 % ( | 2468 % ( |
2424 abs(pa._depth - pb._depth), | 2469 abs(pa._depth - pb._depth), |
2425 pvec._hamming(pa._vec, pb._vec), | 2470 pvec._hamming(pa._vec, pb._vec), |
2426 pa.distance(pb), | 2471 pa.distance(pb), |
2427 rel, | 2472 rel, |
2428 ) | 2473 ) |
2429 ) | 2474 ) |
2430 | 2475 |
2431 | 2476 |
2432 @command( | 2477 @command( |
2433 'debugrebuilddirstate|debugrebuildstate', | 2478 b'debugrebuilddirstate|debugrebuildstate', |
2434 [ | 2479 [ |
2435 ('r', 'rev', '', _('revision to rebuild to'), _('REV')), | 2480 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')), |
2436 ( | 2481 ( |
2437 '', | 2482 b'', |
2438 'minimal', | 2483 b'minimal', |
2439 None, | 2484 None, |
2440 _( | 2485 _( |
2441 'only rebuild files that are inconsistent with ' | 2486 b'only rebuild files that are inconsistent with ' |
2442 'the working copy parent' | 2487 b'the working copy parent' |
2443 ), | 2488 ), |
2444 ), | 2489 ), |
2445 ], | 2490 ], |
2446 _('[-r REV]'), | 2491 _(b'[-r REV]'), |
2447 ) | 2492 ) |
2448 def debugrebuilddirstate(ui, repo, rev, **opts): | 2493 def debugrebuilddirstate(ui, repo, rev, **opts): |
2449 """rebuild the dirstate as it would look for the given revision | 2494 """rebuild the dirstate as it would look for the given revision |
2450 | 2495 |
2451 If no revision is specified the first current parent will be used. | 2496 If no revision is specified the first current parent will be used. |
2470 if opts.get(r'minimal'): | 2515 if opts.get(r'minimal'): |
2471 manifestfiles = set(ctx.manifest().keys()) | 2516 manifestfiles = set(ctx.manifest().keys()) |
2472 dirstatefiles = set(dirstate) | 2517 dirstatefiles = set(dirstate) |
2473 manifestonly = manifestfiles - dirstatefiles | 2518 manifestonly = manifestfiles - dirstatefiles |
2474 dsonly = dirstatefiles - manifestfiles | 2519 dsonly = dirstatefiles - manifestfiles |
2475 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a') | 2520 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a') |
2476 changedfiles = manifestonly | dsnotadded | 2521 changedfiles = manifestonly | dsnotadded |
2477 | 2522 |
2478 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles) | 2523 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles) |
2479 | 2524 |
2480 | 2525 |
2481 @command('debugrebuildfncache', [], '') | 2526 @command(b'debugrebuildfncache', [], b'') |
2482 def debugrebuildfncache(ui, repo): | 2527 def debugrebuildfncache(ui, repo): |
2483 """rebuild the fncache file""" | 2528 """rebuild the fncache file""" |
2484 repair.rebuildfncache(ui, repo) | 2529 repair.rebuildfncache(ui, repo) |
2485 | 2530 |
2486 | 2531 |
2487 @command( | 2532 @command( |
2488 'debugrename', | 2533 b'debugrename', |
2489 [('r', 'rev', '', _('revision to debug'), _('REV'))], | 2534 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))], |
2490 _('[-r REV] [FILE]...'), | 2535 _(b'[-r REV] [FILE]...'), |
2491 ) | 2536 ) |
2492 def debugrename(ui, repo, *pats, **opts): | 2537 def debugrename(ui, repo, *pats, **opts): |
2493 """dump rename information""" | 2538 """dump rename information""" |
2494 | 2539 |
2495 opts = pycompat.byteskwargs(opts) | 2540 opts = pycompat.byteskwargs(opts) |
2496 ctx = scmutil.revsingle(repo, opts.get('rev')) | 2541 ctx = scmutil.revsingle(repo, opts.get(b'rev')) |
2497 m = scmutil.match(ctx, pats, opts) | 2542 m = scmutil.match(ctx, pats, opts) |
2498 for abs in ctx.walk(m): | 2543 for abs in ctx.walk(m): |
2499 fctx = ctx[abs] | 2544 fctx = ctx[abs] |
2500 o = fctx.filelog().renamed(fctx.filenode()) | 2545 o = fctx.filelog().renamed(fctx.filenode()) |
2501 rel = repo.pathto(abs) | 2546 rel = repo.pathto(abs) |
2502 if o: | 2547 if o: |
2503 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) | 2548 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) |
2504 else: | 2549 else: |
2505 ui.write(_("%s not renamed\n") % rel) | 2550 ui.write(_(b"%s not renamed\n") % rel) |
2506 | 2551 |
2507 | 2552 |
2508 @command( | 2553 @command( |
2509 'debugrevlog', | 2554 b'debugrevlog', |
2510 cmdutil.debugrevlogopts + [('d', 'dump', False, _('dump index data'))], | 2555 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))], |
2511 _('-c|-m|FILE'), | 2556 _(b'-c|-m|FILE'), |
2512 optionalrepo=True, | 2557 optionalrepo=True, |
2513 ) | 2558 ) |
2514 def debugrevlog(ui, repo, file_=None, **opts): | 2559 def debugrevlog(ui, repo, file_=None, **opts): |
2515 """show data and statistics about a revlog""" | 2560 """show data and statistics about a revlog""" |
2516 opts = pycompat.byteskwargs(opts) | 2561 opts = pycompat.byteskwargs(opts) |
2517 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts) | 2562 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts) |
2518 | 2563 |
2519 if opts.get("dump"): | 2564 if opts.get(b"dump"): |
2520 numrevs = len(r) | 2565 numrevs = len(r) |
2521 ui.write( | 2566 ui.write( |
2522 ( | 2567 ( |
2523 "# rev p1rev p2rev start end deltastart base p1 p2" | 2568 b"# rev p1rev p2rev start end deltastart base p1 p2" |
2524 " rawsize totalsize compression heads chainlen\n" | 2569 b" rawsize totalsize compression heads chainlen\n" |
2525 ) | 2570 ) |
2526 ) | 2571 ) |
2527 ts = 0 | 2572 ts = 0 |
2528 heads = set() | 2573 heads = set() |
2529 | 2574 |
2541 try: | 2586 try: |
2542 compression = ts / r.end(rev) | 2587 compression = ts / r.end(rev) |
2543 except ZeroDivisionError: | 2588 except ZeroDivisionError: |
2544 compression = 0 | 2589 compression = 0 |
2545 ui.write( | 2590 ui.write( |
2546 "%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d " | 2591 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d " |
2547 "%11d %5d %8d\n" | 2592 b"%11d %5d %8d\n" |
2548 % ( | 2593 % ( |
2549 rev, | 2594 rev, |
2550 p1, | 2595 p1, |
2551 p2, | 2596 p2, |
2552 r.start(rev), | 2597 r.start(rev), |
2567 v = r.version | 2612 v = r.version |
2568 format = v & 0xFFFF | 2613 format = v & 0xFFFF |
2569 flags = [] | 2614 flags = [] |
2570 gdelta = False | 2615 gdelta = False |
2571 if v & revlog.FLAG_INLINE_DATA: | 2616 if v & revlog.FLAG_INLINE_DATA: |
2572 flags.append('inline') | 2617 flags.append(b'inline') |
2573 if v & revlog.FLAG_GENERALDELTA: | 2618 if v & revlog.FLAG_GENERALDELTA: |
2574 gdelta = True | 2619 gdelta = True |
2575 flags.append('generaldelta') | 2620 flags.append(b'generaldelta') |
2576 if not flags: | 2621 if not flags: |
2577 flags = ['(none)'] | 2622 flags = [b'(none)'] |
2578 | 2623 |
2579 ### tracks merge vs single parent | 2624 ### tracks merge vs single parent |
2580 nummerges = 0 | 2625 nummerges = 0 |
2581 | 2626 |
2582 ### tracks ways the "delta" are built | 2627 ### tracks ways the "delta" are built |
2674 nump2 += 1 | 2719 nump2 += 1 |
2675 elif delta != nullrev: | 2720 elif delta != nullrev: |
2676 numother += 1 | 2721 numother += 1 |
2677 | 2722 |
2678 # Obtain data on the raw chunks in the revlog. | 2723 # Obtain data on the raw chunks in the revlog. |
2679 if util.safehasattr(r, '_getsegmentforrevs'): | 2724 if util.safehasattr(r, b'_getsegmentforrevs'): |
2680 segment = r._getsegmentforrevs(rev, rev)[1] | 2725 segment = r._getsegmentforrevs(rev, rev)[1] |
2681 else: | 2726 else: |
2682 segment = r._revlog._getsegmentforrevs(rev, rev)[1] | 2727 segment = r._revlog._getsegmentforrevs(rev, rev)[1] |
2683 if segment: | 2728 if segment: |
2684 chunktype = bytes(segment[0:1]) | 2729 chunktype = bytes(segment[0:1]) |
2685 else: | 2730 else: |
2686 chunktype = 'empty' | 2731 chunktype = b'empty' |
2687 | 2732 |
2688 if chunktype not in chunktypecounts: | 2733 if chunktype not in chunktypecounts: |
2689 chunktypecounts[chunktype] = 0 | 2734 chunktypecounts[chunktype] = 0 |
2690 chunktypesizes[chunktype] = 0 | 2735 chunktypesizes[chunktype] = 0 |
2691 | 2736 |
2723 maxchainspan = max(chainspans) | 2768 maxchainspan = max(chainspans) |
2724 compratio = 1 | 2769 compratio = 1 |
2725 if totalsize: | 2770 if totalsize: |
2726 compratio = totalrawsize / totalsize | 2771 compratio = totalrawsize / totalsize |
2727 | 2772 |
2728 basedfmtstr = '%%%dd\n' | 2773 basedfmtstr = b'%%%dd\n' |
2729 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n' | 2774 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n' |
2730 | 2775 |
2731 def dfmtstr(max): | 2776 def dfmtstr(max): |
2732 return basedfmtstr % len(str(max)) | 2777 return basedfmtstr % len(str(max)) |
2733 | 2778 |
2734 def pcfmtstr(max, padding=0): | 2779 def pcfmtstr(max, padding=0): |
2735 return basepcfmtstr % (len(str(max)), ' ' * padding) | 2780 return basepcfmtstr % (len(str(max)), b' ' * padding) |
2736 | 2781 |
2737 def pcfmt(value, total): | 2782 def pcfmt(value, total): |
2738 if total: | 2783 if total: |
2739 return (value, 100 * float(value) / total) | 2784 return (value, 100 * float(value) / total) |
2740 else: | 2785 else: |
2741 return value, 100.0 | 2786 return value, 100.0 |
2742 | 2787 |
2743 ui.write('format : %d\n' % format) | 2788 ui.write(b'format : %d\n' % format) |
2744 ui.write('flags : %s\n' % ', '.join(flags)) | 2789 ui.write(b'flags : %s\n' % b', '.join(flags)) |
2745 | 2790 |
2746 ui.write('\n') | 2791 ui.write(b'\n') |
2747 fmt = pcfmtstr(totalsize) | 2792 fmt = pcfmtstr(totalsize) |
2748 fmt2 = dfmtstr(totalsize) | 2793 fmt2 = dfmtstr(totalsize) |
2749 ui.write('revisions : ' + fmt2 % numrevs) | 2794 ui.write(b'revisions : ' + fmt2 % numrevs) |
2750 ui.write(' merges : ' + fmt % pcfmt(nummerges, numrevs)) | 2795 ui.write(b' merges : ' + fmt % pcfmt(nummerges, numrevs)) |
2751 ui.write(' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)) | 2796 ui.write(b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)) |
2752 ui.write('revisions : ' + fmt2 % numrevs) | 2797 ui.write(b'revisions : ' + fmt2 % numrevs) |
2753 ui.write(' empty : ' + fmt % pcfmt(numempty, numrevs)) | 2798 ui.write(b' empty : ' + fmt % pcfmt(numempty, numrevs)) |
2754 ui.write( | 2799 ui.write( |
2755 ' text : ' | 2800 b' text : ' |
2756 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta) | 2801 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta) |
2757 ) | 2802 ) |
2758 ui.write( | 2803 ui.write( |
2759 ' delta : ' | 2804 b' delta : ' |
2760 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta) | 2805 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta) |
2761 ) | 2806 ) |
2762 ui.write(' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)) | 2807 ui.write(b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)) |
2763 for depth in sorted(numsnapdepth): | 2808 for depth in sorted(numsnapdepth): |
2764 ui.write( | 2809 ui.write( |
2765 (' lvl-%-3d : ' % depth) | 2810 (b' lvl-%-3d : ' % depth) |
2766 + fmt % pcfmt(numsnapdepth[depth], numrevs) | 2811 + fmt % pcfmt(numsnapdepth[depth], numrevs) |
2767 ) | 2812 ) |
2768 ui.write(' deltas : ' + fmt % pcfmt(numdeltas, numrevs)) | 2813 ui.write(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs)) |
2769 ui.write('revision size : ' + fmt2 % totalsize) | 2814 ui.write(b'revision size : ' + fmt2 % totalsize) |
2770 ui.write(' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)) | 2815 ui.write( |
2816 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize) | |
2817 ) | |
2771 for depth in sorted(numsnapdepth): | 2818 for depth in sorted(numsnapdepth): |
2772 ui.write( | 2819 ui.write( |
2773 (' lvl-%-3d : ' % depth) | 2820 (b' lvl-%-3d : ' % depth) |
2774 + fmt % pcfmt(snaptotal[depth], totalsize) | 2821 + fmt % pcfmt(snaptotal[depth], totalsize) |
2775 ) | 2822 ) |
2776 ui.write(' deltas : ' + fmt % pcfmt(deltatotal, totalsize)) | 2823 ui.write(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize)) |
2777 | 2824 |
2778 def fmtchunktype(chunktype): | 2825 def fmtchunktype(chunktype): |
2779 if chunktype == 'empty': | 2826 if chunktype == b'empty': |
2780 return ' %s : ' % chunktype | 2827 return b' %s : ' % chunktype |
2781 elif chunktype in pycompat.bytestr(string.ascii_letters): | 2828 elif chunktype in pycompat.bytestr(string.ascii_letters): |
2782 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype) | 2829 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype) |
2783 else: | 2830 else: |
2784 return ' 0x%s : ' % hex(chunktype) | 2831 return b' 0x%s : ' % hex(chunktype) |
2785 | 2832 |
2786 ui.write('\n') | 2833 ui.write(b'\n') |
2787 ui.write('chunks : ' + fmt2 % numrevs) | 2834 ui.write(b'chunks : ' + fmt2 % numrevs) |
2788 for chunktype in sorted(chunktypecounts): | 2835 for chunktype in sorted(chunktypecounts): |
2789 ui.write(fmtchunktype(chunktype)) | 2836 ui.write(fmtchunktype(chunktype)) |
2790 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs)) | 2837 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs)) |
2791 ui.write('chunks size : ' + fmt2 % totalsize) | 2838 ui.write(b'chunks size : ' + fmt2 % totalsize) |
2792 for chunktype in sorted(chunktypecounts): | 2839 for chunktype in sorted(chunktypecounts): |
2793 ui.write(fmtchunktype(chunktype)) | 2840 ui.write(fmtchunktype(chunktype)) |
2794 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize)) | 2841 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize)) |
2795 | 2842 |
2796 ui.write('\n') | 2843 ui.write(b'\n') |
2797 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio)) | 2844 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio)) |
2798 ui.write('avg chain length : ' + fmt % avgchainlen) | 2845 ui.write(b'avg chain length : ' + fmt % avgchainlen) |
2799 ui.write('max chain length : ' + fmt % maxchainlen) | 2846 ui.write(b'max chain length : ' + fmt % maxchainlen) |
2800 ui.write('max chain reach : ' + fmt % maxchainspan) | 2847 ui.write(b'max chain reach : ' + fmt % maxchainspan) |
2801 ui.write('compression ratio : ' + fmt % compratio) | 2848 ui.write(b'compression ratio : ' + fmt % compratio) |
2802 | 2849 |
2803 if format > 0: | 2850 if format > 0: |
2804 ui.write('\n') | 2851 ui.write(b'\n') |
2805 ui.write( | 2852 ui.write( |
2806 'uncompressed data size (min/max/avg) : %d / %d / %d\n' | 2853 b'uncompressed data size (min/max/avg) : %d / %d / %d\n' |
2807 % tuple(datasize) | 2854 % tuple(datasize) |
2808 ) | 2855 ) |
2809 ui.write( | 2856 ui.write( |
2810 'full revision size (min/max/avg) : %d / %d / %d\n' | 2857 b'full revision size (min/max/avg) : %d / %d / %d\n' |
2811 % tuple(fullsize) | 2858 % tuple(fullsize) |
2812 ) | 2859 ) |
2813 ui.write( | 2860 ui.write( |
2814 'inter-snapshot size (min/max/avg) : %d / %d / %d\n' | 2861 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n' |
2815 % tuple(semisize) | 2862 % tuple(semisize) |
2816 ) | 2863 ) |
2817 for depth in sorted(snapsizedepth): | 2864 for depth in sorted(snapsizedepth): |
2818 if depth == 0: | 2865 if depth == 0: |
2819 continue | 2866 continue |
2820 ui.write( | 2867 ui.write( |
2821 ' level-%-3d (min/max/avg) : %d / %d / %d\n' | 2868 b' level-%-3d (min/max/avg) : %d / %d / %d\n' |
2822 % ((depth,) + tuple(snapsizedepth[depth])) | 2869 % ((depth,) + tuple(snapsizedepth[depth])) |
2823 ) | 2870 ) |
2824 ui.write( | 2871 ui.write( |
2825 'delta size (min/max/avg) : %d / %d / %d\n' | 2872 b'delta size (min/max/avg) : %d / %d / %d\n' |
2826 % tuple(deltasize) | 2873 % tuple(deltasize) |
2827 ) | 2874 ) |
2828 | 2875 |
2829 if numdeltas > 0: | 2876 if numdeltas > 0: |
2830 ui.write('\n') | 2877 ui.write(b'\n') |
2831 fmt = pcfmtstr(numdeltas) | 2878 fmt = pcfmtstr(numdeltas) |
2832 fmt2 = pcfmtstr(numdeltas, 4) | 2879 fmt2 = pcfmtstr(numdeltas, 4) |
2833 ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)) | 2880 ui.write(b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)) |
2834 if numprev > 0: | 2881 if numprev > 0: |
2835 ui.write( | 2882 ui.write( |
2836 ' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev) | 2883 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev) |
2837 ) | 2884 ) |
2838 ui.write( | 2885 ui.write( |
2839 ' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev) | 2886 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev) |
2840 ) | 2887 ) |
2841 ui.write( | 2888 ui.write( |
2842 ' other : ' + fmt2 % pcfmt(numoprev, numprev) | 2889 b' other : ' + fmt2 % pcfmt(numoprev, numprev) |
2843 ) | 2890 ) |
2844 if gdelta: | 2891 if gdelta: |
2845 ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)) | 2892 ui.write(b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)) |
2846 ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)) | 2893 ui.write(b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)) |
2847 ui.write( | 2894 ui.write( |
2848 'deltas against other : ' + fmt % pcfmt(numother, numdeltas) | 2895 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas) |
2849 ) | 2896 ) |
2850 | 2897 |
2851 | 2898 |
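The report produced by debugrevlog above leans on three small helpers - dfmtstr, pcfmtstr, and pcfmt - to print each counter next to its share of the total in aligned columns. A self-contained rendering of the same arithmetic (copied in spirit from the helpers above, with a made-up total for the example):

    # pcfmt pairs a value with its percentage of the total; pcfmtstr sizes
    # the value column from the largest possible value so rows line up.
    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        return (value, 100.0)

    def pcfmtstr(maxvalue, padding=0):
        return b'%%%dd %s(%%5.2f%%%%)\n' % (len(str(maxvalue)), b' ' * padding)

    fmt = pcfmtstr(12345)
    assert fmt % pcfmt(617, 12345) == b'  617 ( 5.00%)\n'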
2852 @command( | 2899 @command( |
2853 'debugrevlogindex', | 2900 b'debugrevlogindex', |
2854 cmdutil.debugrevlogopts | 2901 cmdutil.debugrevlogopts |
2855 + [('f', 'format', 0, _('revlog format'), _('FORMAT'))], | 2902 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))], |
2856 _('[-f FORMAT] -c|-m|FILE'), | 2903 _(b'[-f FORMAT] -c|-m|FILE'), |
2857 optionalrepo=True, | 2904 optionalrepo=True, |
2858 ) | 2905 ) |
2859 def debugrevlogindex(ui, repo, file_=None, **opts): | 2906 def debugrevlogindex(ui, repo, file_=None, **opts): |
2860 """dump the contents of a revlog index""" | 2907 """dump the contents of a revlog index""" |
2861 opts = pycompat.byteskwargs(opts) | 2908 opts = pycompat.byteskwargs(opts) |
2862 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts) | 2909 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts) |
2863 format = opts.get('format', 0) | 2910 format = opts.get(b'format', 0) |
2864 if format not in (0, 1): | 2911 if format not in (0, 1): |
2865 raise error.Abort(_("unknown format %d") % format) | 2912 raise error.Abort(_(b"unknown format %d") % format) |
2866 | 2913 |
2867 if ui.debugflag: | 2914 if ui.debugflag: |
2868 shortfn = hex | 2915 shortfn = hex |
2869 else: | 2916 else: |
2870 shortfn = short | 2917 shortfn = short |
2876 break | 2923 break |
2877 | 2924 |
2878 if format == 0: | 2925 if format == 0: |
2879 if ui.verbose: | 2926 if ui.verbose: |
2880 ui.write( | 2927 ui.write( |
2881 (" rev offset length linkrev" " %s %s p2\n") | 2928 (b" rev offset length linkrev" b" %s %s p2\n") |
2882 % ("nodeid".ljust(idlen), "p1".ljust(idlen)) | 2929 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen)) |
2883 ) | 2930 ) |
2884 else: | 2931 else: |
2885 ui.write( | 2932 ui.write( |
2886 " rev linkrev %s %s p2\n" | 2933 b" rev linkrev %s %s p2\n" |
2887 % ("nodeid".ljust(idlen), "p1".ljust(idlen)) | 2934 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen)) |
2888 ) | 2935 ) |
2889 elif format == 1: | 2936 elif format == 1: |
2890 if ui.verbose: | 2937 if ui.verbose: |
2891 ui.write( | 2938 ui.write( |
2892 ( | 2939 ( |
2893 " rev flag offset length size link p1" | 2940 b" rev flag offset length size link p1" |
2894 " p2 %s\n" | 2941 b" p2 %s\n" |
2895 ) | 2942 ) |
2896 % "nodeid".rjust(idlen) | 2943 % b"nodeid".rjust(idlen) |
2897 ) | 2944 ) |
2898 else: | 2945 else: |
2899 ui.write( | 2946 ui.write( |
2900 " rev flag size link p1 p2 %s\n" | 2947 b" rev flag size link p1 p2 %s\n" |
2901 % "nodeid".rjust(idlen) | 2948 % b"nodeid".rjust(idlen) |
2902 ) | 2949 ) |
2903 | 2950 |
2904 for i in r: | 2951 for i in r: |
2905 node = r.node(i) | 2952 node = r.node(i) |
2906 if format == 0: | 2953 if format == 0: |
2908 pp = r.parents(node) | 2955 pp = r.parents(node) |
2909 except Exception: | 2956 except Exception: |
2910 pp = [nullid, nullid] | 2957 pp = [nullid, nullid] |
2911 if ui.verbose: | 2958 if ui.verbose: |
2912 ui.write( | 2959 ui.write( |
2913 "% 6d % 9d % 7d % 7d %s %s %s\n" | 2960 b"% 6d % 9d % 7d % 7d %s %s %s\n" |
2914 % ( | 2961 % ( |
2915 i, | 2962 i, |
2916 r.start(i), | 2963 r.start(i), |
2917 r.length(i), | 2964 r.length(i), |
2918 r.linkrev(i), | 2965 r.linkrev(i), |
2921 shortfn(pp[1]), | 2968 shortfn(pp[1]), |
2922 ) | 2969 ) |
2923 ) | 2970 ) |
2924 else: | 2971 else: |
2925 ui.write( | 2972 ui.write( |
2926 "% 6d % 7d %s %s %s\n" | 2973 b"% 6d % 7d %s %s %s\n" |
2927 % ( | 2974 % ( |
2928 i, | 2975 i, |
2929 r.linkrev(i), | 2976 r.linkrev(i), |
2930 shortfn(node), | 2977 shortfn(node), |
2931 shortfn(pp[0]), | 2978 shortfn(pp[0]), |
2934 ) | 2981 ) |
2935 elif format == 1: | 2982 elif format == 1: |
2936 pr = r.parentrevs(i) | 2983 pr = r.parentrevs(i) |
2937 if ui.verbose: | 2984 if ui.verbose: |
2938 ui.write( | 2985 ui.write( |
2939 "% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" | 2986 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" |
2940 % ( | 2987 % ( |
2941 i, | 2988 i, |
2942 r.flags(i), | 2989 r.flags(i), |
2943 r.start(i), | 2990 r.start(i), |
2944 r.length(i), | 2991 r.length(i), |
2949 shortfn(node), | 2996 shortfn(node), |
2950 ) | 2997 ) |
2951 ) | 2998 ) |
2952 else: | 2999 else: |
2953 ui.write( | 3000 ui.write( |
2954 "% 6d %04x % 8d % 6d % 6d % 6d %s\n" | 3001 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n" |
2955 % ( | 3002 % ( |
2956 i, | 3003 i, |
2957 r.flags(i), | 3004 r.flags(i), |
2958 r.rawsize(i), | 3005 r.rawsize(i), |
2959 r.linkrev(i), | 3006 r.linkrev(i), |
2963 ) | 3010 ) |
2964 ) | 3011 ) |
2965 | 3012 |
2966 | 3013 |
2967 @command( | 3014 @command( |
2968 'debugrevspec', | 3015 b'debugrevspec', |
2969 [ | 3016 [ |
2970 ( | 3017 ( |
2971 '', | 3018 b'', |
2972 'optimize', | 3019 b'optimize', |
2973 None, | 3020 None, |
2974 _('print parsed tree after optimizing (DEPRECATED)'), | 3021 _(b'print parsed tree after optimizing (DEPRECATED)'), |
2975 ), | |
2976 ('', 'show-revs', True, _('print list of result revisions (default)')), | |
2977 ( | |
2978 's', | |
2979 'show-set', | |
2980 None, | |
2981 _('print internal representation of result set'), | |
2982 ), | 3022 ), |
2983 ( | 3023 ( |
2984 'p', | 3024 b'', |
2985 'show-stage', | 3025 b'show-revs', |
3026 True, | |
3027 _(b'print list of result revisions (default)'), | |
3028 ), | |
3029 ( | |
3030 b's', | |
3031 b'show-set', | |
3032 None, | |
3033 _(b'print internal representation of result set'), | |
3034 ), | |
3035 ( | |
3036 b'p', | |
3037 b'show-stage', | |
2986 [], | 3038 [], |
2987 _('print parsed tree at the given stage'), | 3039 _(b'print parsed tree at the given stage'), |
2988 _('NAME'), | 3040 _(b'NAME'), |
2989 ), | 3041 ), |
2990 ('', 'no-optimized', False, _('evaluate tree without optimization')), | 3042 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')), |
2991 ('', 'verify-optimized', False, _('verify optimized result')), | 3043 (b'', b'verify-optimized', False, _(b'verify optimized result')), |
2992 ], | 3044 ], |
2993 'REVSPEC', | 3045 b'REVSPEC', |
2994 ) | 3046 ) |
2995 def debugrevspec(ui, repo, expr, **opts): | 3047 def debugrevspec(ui, repo, expr, **opts): |
2996 """parse and apply a revision specification | 3048 """parse and apply a revision specification |
2997 | 3049 |
2998 Use -p/--show-stage option to print the parsed tree at the given stages. | 3050 Use -p/--show-stage option to print the parsed tree at the given stages. |
3003 | 3055 |
3004 Use --verify-optimized to compare the optimized result with the unoptimized | 3056 Use --verify-optimized to compare the optimized result with the unoptimized |
3005 one. Returns 1 if the optimized result differs. | 3057 one. Returns 1 if the optimized result differs. |
3006 """ | 3058 """ |
3007 opts = pycompat.byteskwargs(opts) | 3059 opts = pycompat.byteskwargs(opts) |
3008 aliases = ui.configitems('revsetalias') | 3060 aliases = ui.configitems(b'revsetalias') |
3009 stages = [ | 3061 stages = [ |
3010 ('parsed', lambda tree: tree), | 3062 (b'parsed', lambda tree: tree), |
3011 ( | 3063 ( |
3012 'expanded', | 3064 b'expanded', |
3013 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn), | 3065 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn), |
3014 ), | 3066 ), |
3015 ('concatenated', revsetlang.foldconcat), | 3067 (b'concatenated', revsetlang.foldconcat), |
3016 ('analyzed', revsetlang.analyze), | 3068 (b'analyzed', revsetlang.analyze), |
3017 ('optimized', revsetlang.optimize), | 3069 (b'optimized', revsetlang.optimize), |
3018 ] | 3070 ] |
3019 if opts['no_optimized']: | 3071 if opts[b'no_optimized']: |
3020 stages = stages[:-1] | 3072 stages = stages[:-1] |
3021 if opts['verify_optimized'] and opts['no_optimized']: | 3073 if opts[b'verify_optimized'] and opts[b'no_optimized']: |
3022 raise error.Abort( | 3074 raise error.Abort( |
3023 _('cannot use --verify-optimized with ' '--no-optimized') | 3075 _(b'cannot use --verify-optimized with ' b'--no-optimized') |
3024 ) | 3076 ) |
3025 stagenames = set(n for n, f in stages) | 3077 stagenames = set(n for n, f in stages) |
3026 | 3078 |
3027 showalways = set() | 3079 showalways = set() |
3028 showchanged = set() | 3080 showchanged = set() |
3029 if ui.verbose and not opts['show_stage']: | 3081 if ui.verbose and not opts[b'show_stage']: |
3030 # show parsed tree by --verbose (deprecated) | 3082 # show parsed tree by --verbose (deprecated) |
3031 showalways.add('parsed') | 3083 showalways.add(b'parsed') |
3032 showchanged.update(['expanded', 'concatenated']) | 3084 showchanged.update([b'expanded', b'concatenated']) |
3033 if opts['optimize']: | 3085 if opts[b'optimize']: |
3034 showalways.add('optimized') | 3086 showalways.add(b'optimized') |
3035 if opts['show_stage'] and opts['optimize']: | 3087 if opts[b'show_stage'] and opts[b'optimize']: |
3036 raise error.Abort(_('cannot use --optimize with --show-stage')) | 3088 raise error.Abort(_(b'cannot use --optimize with --show-stage')) |
3037 if opts['show_stage'] == ['all']: | 3089 if opts[b'show_stage'] == [b'all']: |
3038 showalways.update(stagenames) | 3090 showalways.update(stagenames) |
3039 else: | 3091 else: |
3040 for n in opts['show_stage']: | 3092 for n in opts[b'show_stage']: |
3041 if n not in stagenames: | 3093 if n not in stagenames: |
3042 raise error.Abort(_('invalid stage name: %s') % n) | 3094 raise error.Abort(_(b'invalid stage name: %s') % n) |
3043 showalways.update(opts['show_stage']) | 3095 showalways.update(opts[b'show_stage']) |
3044 | 3096 |
3045 treebystage = {} | 3097 treebystage = {} |
3046 printedtree = None | 3098 printedtree = None |
3047 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo)) | 3099 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo)) |
3048 for n, f in stages: | 3100 for n, f in stages: |
3049 treebystage[n] = tree = f(tree) | 3101 treebystage[n] = tree = f(tree) |
3050 if n in showalways or (n in showchanged and tree != printedtree): | 3102 if n in showalways or (n in showchanged and tree != printedtree): |
3051 if opts['show_stage'] or n != 'parsed': | 3103 if opts[b'show_stage'] or n != b'parsed': |
3052 ui.write("* %s:\n" % n) | 3104 ui.write(b"* %s:\n" % n) |
3053 ui.write(revsetlang.prettyformat(tree), "\n") | 3105 ui.write(revsetlang.prettyformat(tree), b"\n") |
3054 printedtree = tree | 3106 printedtree = tree |
3055 | 3107 |
3056 if opts['verify_optimized']: | 3108 if opts[b'verify_optimized']: |
3057 arevs = revset.makematcher(treebystage['analyzed'])(repo) | 3109 arevs = revset.makematcher(treebystage[b'analyzed'])(repo) |
3058 brevs = revset.makematcher(treebystage['optimized'])(repo) | 3110 brevs = revset.makematcher(treebystage[b'optimized'])(repo) |
3059 if opts['show_set'] or (opts['show_set'] is None and ui.verbose): | 3111 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose): |
3060 ui.write("* analyzed set:\n", stringutil.prettyrepr(arevs), "\n") | 3112 ui.write(b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n") |
3061 ui.write("* optimized set:\n", stringutil.prettyrepr(brevs), "\n") | 3113 ui.write(b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n") |
3062 arevs = list(arevs) | 3114 arevs = list(arevs) |
3063 brevs = list(brevs) | 3115 brevs = list(brevs) |
3064 if arevs == brevs: | 3116 if arevs == brevs: |
3065 return 0 | 3117 return 0 |
3066 ui.write('--- analyzed\n', label='diff.file_a') | 3118 ui.write(b'--- analyzed\n', label=b'diff.file_a') |
3067 ui.write('+++ optimized\n', label='diff.file_b') | 3119 ui.write(b'+++ optimized\n', label=b'diff.file_b') |
3068 sm = difflib.SequenceMatcher(None, arevs, brevs) | 3120 sm = difflib.SequenceMatcher(None, arevs, brevs) |
3069 for tag, alo, ahi, blo, bhi in sm.get_opcodes(): | 3121 for tag, alo, ahi, blo, bhi in sm.get_opcodes(): |
3070 if tag in (r'delete', r'replace'): | 3122 if tag in (r'delete', r'replace'): |
3071 for c in arevs[alo:ahi]: | 3123 for c in arevs[alo:ahi]: |
3072 ui.write('-%d\n' % c, label='diff.deleted') | 3124 ui.write(b'-%d\n' % c, label=b'diff.deleted') |
3073 if tag in (r'insert', r'replace'): | 3125 if tag in (r'insert', r'replace'): |
3074 for c in brevs[blo:bhi]: | 3126 for c in brevs[blo:bhi]: |
3075 ui.write('+%d\n' % c, label='diff.inserted') | 3127 ui.write(b'+%d\n' % c, label=b'diff.inserted') |
3076 if tag == r'equal': | 3128 if tag == r'equal': |
3077 for c in arevs[alo:ahi]: | 3129 for c in arevs[alo:ahi]: |
3078 ui.write(' %d\n' % c) | 3130 ui.write(b' %d\n' % c) |
3079 return 1 | 3131 return 1 |
3080 | 3132 |
3081 func = revset.makematcher(tree) | 3133 func = revset.makematcher(tree) |
3082 revs = func(repo) | 3134 revs = func(repo) |
3083 if opts['show_set'] or (opts['show_set'] is None and ui.verbose): | 3135 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose): |
3084 ui.write("* set:\n", stringutil.prettyrepr(revs), "\n") | 3136 ui.write(b"* set:\n", stringutil.prettyrepr(revs), b"\n") |
3085 if not opts['show_revs']: | 3137 if not opts[b'show_revs']: |
3086 return | 3138 return |
3087 for c in revs: | 3139 for c in revs: |
3088 ui.write("%d\n" % c) | 3140 ui.write(b"%d\n" % c) |
3089 | 3141 |
3090 | 3142 |
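debugrevspec above runs the expression through a fixed list of named stages and prints the intermediate trees selected by -p/--show-stage (or all of them with -p all). A toy sketch of that driver loop, with stand-in transforms in place of the real revsetlang functions:

    # Each stage is a (name, transform) pair; the tree from one stage feeds
    # the next, and only the requested stage names get printed.
    def run_stages(expr, stages, show):
        tree = expr
        for name, transform in stages:
            tree = transform(tree)
            if name in show:
                print('* %s:' % name)
                print('  %r' % (tree,))
        return tree

    stages = [
        ('parsed', lambda t: ('symbol', t)),
        ('analyzed', lambda t: ('analyzed',) + t),
        ('optimized', lambda t: t),  # no-op stand-in for revsetlang.optimize
    ]
    run_stages('tip', stages, show={'parsed', 'optimized'})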
3091 @command( | 3143 @command( |
3092 'debugserve', | 3144 b'debugserve', |
3093 [ | 3145 [ |
3094 ( | 3146 ( |
3095 '', | 3147 b'', |
3096 'sshstdio', | 3148 b'sshstdio', |
3097 False, | 3149 False, |
3098 _('run an SSH server bound to process handles'), | 3150 _(b'run an SSH server bound to process handles'), |
3099 ), | 3151 ), |
3100 ('', 'logiofd', '', _('file descriptor to log server I/O to')), | 3152 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')), |
3101 ('', 'logiofile', '', _('file to log server I/O to')), | 3153 (b'', b'logiofile', b'', _(b'file to log server I/O to')), |
3102 ], | 3154 ], |
3103 '', | 3155 b'', |
3104 ) | 3156 ) |
3105 def debugserve(ui, repo, **opts): | 3157 def debugserve(ui, repo, **opts): |
3106 """run a server with advanced settings | 3158 """run a server with advanced settings |
3107 | 3159 |
3108 This command is similar to :hg:`serve`. It exists partially as a | 3160 This command is similar to :hg:`serve`. It exists partially as a |
3109 workaround to the fact that ``hg serve --stdio`` must have specific | 3161 workaround to the fact that ``hg serve --stdio`` must have specific |
3110 arguments for security reasons. | 3162 arguments for security reasons. |
3111 """ | 3163 """ |
3112 opts = pycompat.byteskwargs(opts) | 3164 opts = pycompat.byteskwargs(opts) |
3113 | 3165 |
3114 if not opts['sshstdio']: | 3166 if not opts[b'sshstdio']: |
3115 raise error.Abort(_('only --sshstdio is currently supported')) | 3167 raise error.Abort(_(b'only --sshstdio is currently supported')) |
3116 | 3168 |
3117 logfh = None | 3169 logfh = None |
3118 | 3170 |
3119 if opts['logiofd'] and opts['logiofile']: | 3171 if opts[b'logiofd'] and opts[b'logiofile']: |
3120 raise error.Abort(_('cannot use both --logiofd and --logiofile')) | 3172 raise error.Abort(_(b'cannot use both --logiofd and --logiofile')) |
3121 | 3173 |
3122 if opts['logiofd']: | 3174 if opts[b'logiofd']: |
3123 # Line buffered because output is line based. | 3175 # Line buffered because output is line based. |
3124 try: | 3176 try: |
3125 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1) | 3177 logfh = os.fdopen(int(opts[b'logiofd']), r'ab', 1) |
3126 except OSError as e: | 3178 except OSError as e: |
3127 if e.errno != errno.ESPIPE: | 3179 if e.errno != errno.ESPIPE: |
3128 raise | 3180 raise |
3129 # can't seek a pipe, so `ab` mode fails on py3 | 3181 # can't seek a pipe, so `ab` mode fails on py3 |
3130 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1) | 3182 logfh = os.fdopen(int(opts[b'logiofd']), r'wb', 1) |
3131 elif opts['logiofile']: | 3183 elif opts[b'logiofile']: |
3132 logfh = open(opts['logiofile'], 'ab', 1) | 3184 logfh = open(opts[b'logiofile'], b'ab', 1) |
3133 | 3185 |
3134 s = wireprotoserver.sshserver(ui, repo, logfh=logfh) | 3186 s = wireprotoserver.sshserver(ui, repo, logfh=logfh) |
3135 s.serve_forever() | 3187 s.serve_forever() |
3136 | 3188 |
3137 | 3189 |
3138 @command('debugsetparents', [], _('REV1 [REV2]')) | 3190 @command(b'debugsetparents', [], _(b'REV1 [REV2]')) |
3139 def debugsetparents(ui, repo, rev1, rev2=None): | 3191 def debugsetparents(ui, repo, rev1, rev2=None): |
3140 """manually set the parents of the current working directory | 3192 """manually set the parents of the current working directory |
3141 | 3193 |
3142 This is useful for writing repository conversion tools, but should | 3194 This is useful for writing repository conversion tools, but should |
3143 be used with care. For example, neither the working directory nor the | 3195 be used with care. For example, neither the working directory nor the |
3146 | 3198 |
3147 Returns 0 on success. | 3199 Returns 0 on success. |
3148 """ | 3200 """ |
3149 | 3201 |
3150 node1 = scmutil.revsingle(repo, rev1).node() | 3202 node1 = scmutil.revsingle(repo, rev1).node() |
3151 node2 = scmutil.revsingle(repo, rev2, 'null').node() | 3203 node2 = scmutil.revsingle(repo, rev2, b'null').node() |
3152 | 3204 |
3153 with repo.wlock(): | 3205 with repo.wlock(): |
3154 repo.setparents(node1, node2) | 3206 repo.setparents(node1, node2) |
3155 | 3207 |
3156 | 3208 |
3157 @command('debugsidedata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV')) | 3209 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV')) |
3158 def debugsidedata(ui, repo, file_, rev=None, **opts): | 3210 def debugsidedata(ui, repo, file_, rev=None, **opts): |
3159 """dump the side data for a cl/manifest/file revision""" | 3211 """dump the side data for a cl/manifest/file revision""" |
3160 opts = pycompat.byteskwargs(opts) | 3212 opts = pycompat.byteskwargs(opts) |
3161 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'): | 3213 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'): |
3162 if rev is not None: | 3214 if rev is not None: |
3163 raise error.CommandError('debugdata', _('invalid arguments')) | 3215 raise error.CommandError(b'debugdata', _(b'invalid arguments')) |
3164 file_, rev = None, file_ | 3216 file_, rev = None, file_ |
3165 elif rev is None: | 3217 elif rev is None: |
3166 raise error.CommandError('debugdata', _('invalid arguments')) | 3218 raise error.CommandError(b'debugdata', _(b'invalid arguments')) |
3167 r = cmdutil.openstorage(repo, 'debugdata', file_, opts) | 3219 r = cmdutil.openstorage(repo, b'debugdata', file_, opts) |
3168 r = getattr(r, '_revlog', r) | 3220 r = getattr(r, '_revlog', r) |
3169 try: | 3221 try: |
3170 sidedata = r.sidedata(r.lookup(rev)) | 3222 sidedata = r.sidedata(r.lookup(rev)) |
3171 except KeyError: | 3223 except KeyError: |
3172 raise error.Abort(_('invalid revision identifier %s') % rev) | 3224 raise error.Abort(_(b'invalid revision identifier %s') % rev) |
3173 if sidedata: | 3225 if sidedata: |
3174 sidedata = list(sidedata.items()) | 3226 sidedata = list(sidedata.items()) |
3175 sidedata.sort() | 3227 sidedata.sort() |
3176 ui.write(('%d sidedata entries\n' % len(sidedata))) | 3228 ui.write((b'%d sidedata entries\n' % len(sidedata))) |
3177 for key, value in sidedata: | 3229 for key, value in sidedata: |
3178 ui.write((' entry-%04o size %d\n' % (key, len(value)))) | 3230 ui.write((b' entry-%04o size %d\n' % (key, len(value)))) |
3179 if ui.verbose: | 3231 if ui.verbose: |
3180 ui.write((' %s\n' % stringutil.pprint(value))) | 3232 ui.write((b' %s\n' % stringutil.pprint(value))) |
3181 | 3233 |
3182 | 3234 |
3183 @command('debugssl', [], '[SOURCE]', optionalrepo=True) | 3235 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True) |
3184 def debugssl(ui, repo, source=None, **opts): | 3236 def debugssl(ui, repo, source=None, **opts): |
3185 '''test a secure connection to a server | 3237 '''test a secure connection to a server |
3186 | 3238 |
3187 This builds the certificate chain for the server on Windows, installing the | 3239 This builds the certificate chain for the server on Windows, installing the |
3188 missing intermediates and trusted root via Windows Update if necessary. It | 3240 missing intermediates and trusted root via Windows Update if necessary. It |
3194 If the update succeeds, retry the original operation. Otherwise, the cause | 3246 If the update succeeds, retry the original operation. Otherwise, the cause |
3195 of the SSL error is likely another issue. | 3247 of the SSL error is likely another issue. |
3196 ''' | 3248 ''' |
3197 if not pycompat.iswindows: | 3249 if not pycompat.iswindows: |
3198 raise error.Abort( | 3250 raise error.Abort( |
3199 _('certificate chain building is only possible on ' 'Windows') | 3251 _(b'certificate chain building is only possible on ' b'Windows') |
3200 ) | 3252 ) |
3201 | 3253 |
3202 if not source: | 3254 if not source: |
3203 if not repo: | 3255 if not repo: |
3204 raise error.Abort( | 3256 raise error.Abort( |
3205 _( | 3257 _( |
3206 "there is no Mercurial repository here, and no " | 3258 b"there is no Mercurial repository here, and no " |
3207 "server specified" | 3259 b"server specified" |
3208 ) | 3260 ) |
3209 ) | 3261 ) |
3210 source = "default" | 3262 source = b"default" |
3211 | 3263 |
3212 source, branches = hg.parseurl(ui.expandpath(source)) | 3264 source, branches = hg.parseurl(ui.expandpath(source)) |
3213 url = util.url(source) | 3265 url = util.url(source) |
3214 | 3266 |
3215 defaultport = {'https': 443, 'ssh': 22} | 3267 defaultport = {b'https': 443, b'ssh': 22} |
3216 if url.scheme in defaultport: | 3268 if url.scheme in defaultport: |
3217 try: | 3269 try: |
3218 addr = (url.host, int(url.port or defaultport[url.scheme])) | 3270 addr = (url.host, int(url.port or defaultport[url.scheme])) |
3219 except ValueError: | 3271 except ValueError: |
3220 raise error.Abort(_("malformed port number in URL")) | 3272 raise error.Abort(_(b"malformed port number in URL")) |
3221 else: | 3273 else: |
3222 raise error.Abort(_("only https and ssh connections are supported")) | 3274 raise error.Abort(_(b"only https and ssh connections are supported")) |
3223 | 3275 |
3224 from . import win32 | 3276 from . import win32 |
3225 | 3277 |
3226 s = ssl.wrap_socket( | 3278 s = ssl.wrap_socket( |
3227 socket.socket(), | 3279 socket.socket(), |
3232 | 3284 |
3233 try: | 3285 try: |
3234 s.connect(addr) | 3286 s.connect(addr) |
3235 cert = s.getpeercert(True) | 3287 cert = s.getpeercert(True) |
3236 | 3288 |
3237 ui.status(_('checking the certificate chain for %s\n') % url.host) | 3289 ui.status(_(b'checking the certificate chain for %s\n') % url.host) |
3238 | 3290 |
3239 complete = win32.checkcertificatechain(cert, build=False) | 3291 complete = win32.checkcertificatechain(cert, build=False) |
3240 | 3292 |
3241 if not complete: | 3293 if not complete: |
3242 ui.status(_('certificate chain is incomplete, updating... ')) | 3294 ui.status(_(b'certificate chain is incomplete, updating... ')) |
3243 | 3295 |
3244 if not win32.checkcertificatechain(cert): | 3296 if not win32.checkcertificatechain(cert): |
3245 ui.status(_('failed.\n')) | 3297 ui.status(_(b'failed.\n')) |
3246 else: | 3298 else: |
3247 ui.status(_('done.\n')) | 3299 ui.status(_(b'done.\n')) |
3248 else: | 3300 else: |
3249 ui.status(_('full certificate chain is available\n')) | 3301 ui.status(_(b'full certificate chain is available\n')) |
3250 finally: | 3302 finally: |
3251 s.close() | 3303 s.close() |
3252 | 3304 |
3253 | 3305 |
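debugssl above resolves the peer address by combining the parsed URL with a small table of default ports, aborting for schemes it cannot probe. A hedged sketch of just that step (names are illustrative; the real command then wraps a socket and walks the certificate chain via the win32 module):

    # Pick the port from the URL when present, else from the scheme default;
    # reject schemes other than https/ssh, as the command above does.
    defaultport = {b'https': 443, b'ssh': 22}

    def peer_addr(host, scheme, port=None):
        if scheme not in defaultport:
            raise ValueError('only https and ssh connections are supported')
        return (host, int(port or defaultport[scheme]))

    assert peer_addr(b'example.com', b'https') == (b'example.com', 443)
    assert peer_addr(b'example.com', b'ssh', port=b'2222') == (b'example.com', 2222)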
3254 @command( | 3306 @command( |
3255 'debugsub', | 3307 b'debugsub', |
3256 [('r', 'rev', '', _('revision to check'), _('REV'))], | 3308 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))], |
3257 _('[-r REV] [REV]'), | 3309 _(b'[-r REV] [REV]'), |
3258 ) | 3310 ) |
3259 def debugsub(ui, repo, rev=None): | 3311 def debugsub(ui, repo, rev=None): |
3260 ctx = scmutil.revsingle(repo, rev, None) | 3312 ctx = scmutil.revsingle(repo, rev, None) |
3261 for k, v in sorted(ctx.substate.items()): | 3313 for k, v in sorted(ctx.substate.items()): |
3262 ui.write('path %s\n' % k) | 3314 ui.write(b'path %s\n' % k) |
3263 ui.write(' source %s\n' % v[0]) | 3315 ui.write(b' source %s\n' % v[0]) |
3264 ui.write(' revision %s\n' % v[1]) | 3316 ui.write(b' revision %s\n' % v[1]) |
3265 | 3317 |
3266 | 3318 |
3267 @command( | 3319 @command( |
3268 'debugsuccessorssets', | 3320 b'debugsuccessorssets', |
3269 [('', 'closest', False, _('return closest successors sets only'))], | 3321 [(b'', b'closest', False, _(b'return closest successors sets only'))], |
3270 _('[REV]'), | 3322 _(b'[REV]'), |
3271 ) | 3323 ) |
3272 def debugsuccessorssets(ui, repo, *revs, **opts): | 3324 def debugsuccessorssets(ui, repo, *revs, **opts): |
3273 """show set of successors for revision | 3325 """show set of successors for revision |
3274 | 3326 |
3275 A successors set of changeset A is a consistent group of revisions that | 3327 A successors set of changeset A is a consistent group of revisions that |
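The docstring is truncated in this hunk; as a rough illustration of the semantics it describes, each successors set is one consistent group of replacement revisions, so a single rewrite, a split, and a divergent rewrite of a changeset A look like this (node names are made up):

    # Hypothetical successors-sets values for a changeset A; each inner list
    # is one successors set.
    examples = {
        'A rewritten into B': [['B']],
        'A split into B and C': [['B', 'C']],
        'A divergently rewritten into B and, independently, C': [['B'], ['C']],
    }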
3305 cache = {} | 3357 cache = {} |
3306 ctx2str = bytes | 3358 ctx2str = bytes |
3307 node2str = short | 3359 node2str = short |
3308 for rev in scmutil.revrange(repo, revs): | 3360 for rev in scmutil.revrange(repo, revs): |
3309 ctx = repo[rev] | 3361 ctx = repo[rev] |
3310 ui.write('%s\n' % ctx2str(ctx)) | 3362 ui.write(b'%s\n' % ctx2str(ctx)) |
3311 for succsset in obsutil.successorssets( | 3363 for succsset in obsutil.successorssets( |
3312 repo, ctx.node(), closest=opts[r'closest'], cache=cache | 3364 repo, ctx.node(), closest=opts[r'closest'], cache=cache |
3313 ): | 3365 ): |
3314 if succsset: | 3366 if succsset: |
3315 ui.write(' ') | 3367 ui.write(b' ') |
3316 ui.write(node2str(succsset[0])) | 3368 ui.write(node2str(succsset[0])) |
3317 for node in succsset[1:]: | 3369 for node in succsset[1:]: |
3318 ui.write(' ') | 3370 ui.write(b' ') |
3319 ui.write(node2str(node)) | 3371 ui.write(node2str(node)) |
3320 ui.write('\n') | 3372 ui.write(b'\n') |
3321 | 3373 |
3322 | 3374 |
3323 @command( | 3375 @command( |
3324 'debugtemplate', | 3376 b'debugtemplate', |
3325 [ | 3377 [ |
3326 ('r', 'rev', [], _('apply template on changesets'), _('REV')), | 3378 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')), |
3327 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE')), | 3379 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')), |
3328 ], | 3380 ], |
3329 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'), | 3381 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'), |
3330 optionalrepo=True, | 3382 optionalrepo=True, |
3331 ) | 3383 ) |
3332 def debugtemplate(ui, repo, tmpl, **opts): | 3384 def debugtemplate(ui, repo, tmpl, **opts): |
3333 """parse and apply a template | 3385 """parse and apply a template |
3334 | 3386 |
3340 """ | 3392 """ |
3341 revs = None | 3393 revs = None |
3342 if opts[r'rev']: | 3394 if opts[r'rev']: |
3343 if repo is None: | 3395 if repo is None: |
3344 raise error.RepoError( | 3396 raise error.RepoError( |
3345 _('there is no Mercurial repository here ' '(.hg not found)') | 3397 _(b'there is no Mercurial repository here ' b'(.hg not found)') |
3346 ) | 3398 ) |
3347 revs = scmutil.revrange(repo, opts[r'rev']) | 3399 revs = scmutil.revrange(repo, opts[r'rev']) |
3348 | 3400 |
3349 props = {} | 3401 props = {} |
3350 for d in opts[r'define']: | 3402 for d in opts[r'define']: |
3351 try: | 3403 try: |
3352 k, v = (e.strip() for e in d.split('=', 1)) | 3404 k, v = (e.strip() for e in d.split(b'=', 1)) |
3353 if not k or k == 'ui': | 3405 if not k or k == b'ui': |
3354 raise ValueError | 3406 raise ValueError |
3355 props[k] = v | 3407 props[k] = v |
3356 except ValueError: | 3408 except ValueError: |
3357 raise error.Abort(_('malformed keyword definition: %s') % d) | 3409 raise error.Abort(_(b'malformed keyword definition: %s') % d) |
3358 | 3410 |
3359 if ui.verbose: | 3411 if ui.verbose: |
3360 aliases = ui.configitems('templatealias') | 3412 aliases = ui.configitems(b'templatealias') |
3361 tree = templater.parse(tmpl) | 3413 tree = templater.parse(tmpl) |
3362 ui.note(templater.prettyformat(tree), '\n') | 3414 ui.note(templater.prettyformat(tree), b'\n') |
3363 newtree = templater.expandaliases(tree, aliases) | 3415 newtree = templater.expandaliases(tree, aliases) |
3364 if newtree != tree: | 3416 if newtree != tree: |
3365 ui.note("* expanded:\n", templater.prettyformat(newtree), '\n') | 3417 ui.note(b"* expanded:\n", templater.prettyformat(newtree), b'\n') |
3366 | 3418 |
3367 if revs is None: | 3419 if revs is None: |
3368 tres = formatter.templateresources(ui, repo) | 3420 tres = formatter.templateresources(ui, repo) |
3369 t = formatter.maketemplater(ui, tmpl, resources=tres) | 3421 t = formatter.maketemplater(ui, tmpl, resources=tres) |
3370 if ui.verbose: | 3422 if ui.verbose: |
3371 kwds, funcs = t.symbolsuseddefault() | 3423 kwds, funcs = t.symbolsuseddefault() |
3372 ui.write("* keywords: %s\n" % ', '.join(sorted(kwds))) | 3424 ui.write(b"* keywords: %s\n" % b', '.join(sorted(kwds))) |
3373 ui.write("* functions: %s\n" % ', '.join(sorted(funcs))) | 3425 ui.write(b"* functions: %s\n" % b', '.join(sorted(funcs))) |
3374 ui.write(t.renderdefault(props)) | 3426 ui.write(t.renderdefault(props)) |
3375 else: | 3427 else: |
3376 displayer = logcmdutil.maketemplater(ui, repo, tmpl) | 3428 displayer = logcmdutil.maketemplater(ui, repo, tmpl) |
3377 if ui.verbose: | 3429 if ui.verbose: |
3378 kwds, funcs = displayer.t.symbolsuseddefault() | 3430 kwds, funcs = displayer.t.symbolsuseddefault() |
3379 ui.write("* keywords: %s\n" % ', '.join(sorted(kwds))) | 3431 ui.write(b"* keywords: %s\n" % b', '.join(sorted(kwds))) |
3380 ui.write("* functions: %s\n" % ', '.join(sorted(funcs))) | 3432 ui.write(b"* functions: %s\n" % b', '.join(sorted(funcs))) |
3381 for r in revs: | 3433 for r in revs: |
3382 displayer.show(repo[r], **pycompat.strkwargs(props)) | 3434 displayer.show(repo[r], **pycompat.strkwargs(props)) |
3383 displayer.close() | 3435 displayer.close() |
3384 | 3436 |
3385 | 3437 |
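The -D KEY=VALUE handling in debugtemplate above is easy to exercise in isolation; a minimal str-based sketch of the same parsing (the helper name is made up, and the real code operates on bytes):

    def parsedefine(d):
        # Split on the first '=', strip both halves, and reject empty keys
        # and the reserved name 'ui', exactly as the loop above does.
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            return k, v
        except ValueError:
            raise ValueError('malformed keyword definition: %s' % d)

    # parsedefine('author=alice') -> ('author', 'alice')
    # parsedefine('nodelimiter')  -> ValueError: malformed keyword definition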
3386 @command( | 3438 @command( |
3387 'debuguigetpass', | 3439 b'debuguigetpass', |
3388 [('p', 'prompt', '', _('prompt text'), _('TEXT')),], | 3440 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),], |
3389 _('[-p TEXT]'), | 3441 _(b'[-p TEXT]'), |
3390 norepo=True, | 3442 norepo=True, |
3391 ) | 3443 ) |
3392 def debuguigetpass(ui, prompt=''): | 3444 def debuguigetpass(ui, prompt=b''): |
3393 """show prompt to type password""" | 3445 """show prompt to type password""" |
3394 r = ui.getpass(prompt) | 3446 r = ui.getpass(prompt) |
3395 ui.write('response: %s\n' % r) | 3447 ui.write(b'response: %s\n' % r) |
3396 | 3448 |
3397 | 3449 |
3398 @command( | 3450 @command( |
3399 'debuguiprompt', | 3451 b'debuguiprompt', |
3400 [('p', 'prompt', '', _('prompt text'), _('TEXT')),], | 3452 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),], |
3401 _('[-p TEXT]'), | 3453 _(b'[-p TEXT]'), |
3402 norepo=True, | 3454 norepo=True, |
3403 ) | 3455 ) |
3404 def debuguiprompt(ui, prompt=''): | 3456 def debuguiprompt(ui, prompt=b''): |
3405 """show plain prompt""" | 3457 """show plain prompt""" |
3406 r = ui.prompt(prompt) | 3458 r = ui.prompt(prompt) |
3407 ui.write('response: %s\n' % r) | 3459 ui.write(b'response: %s\n' % r) |
3408 | 3460 |
3409 | 3461 |
3410 @command('debugupdatecaches', []) | 3462 @command(b'debugupdatecaches', []) |
3411 def debugupdatecaches(ui, repo, *pats, **opts): | 3463 def debugupdatecaches(ui, repo, *pats, **opts): |
3412 """warm all known caches in the repository""" | 3464 """warm all known caches in the repository""" |
3413 with repo.wlock(), repo.lock(): | 3465 with repo.wlock(), repo.lock(): |
3414 repo.updatecaches(full=True) | 3466 repo.updatecaches(full=True) |
3415 | 3467 |
3416 | 3468 |
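debugupdatecaches above takes the working-copy lock before the store lock; the comma form in the with statement acquires left to right and releases in reverse when the block exits. A tiny standalone sketch of that ordering, with a made-up fakelock helper:

    from contextlib import contextmanager

    @contextmanager
    def fakelock(name, log):
        # Record acquisition and release so the ordering is visible.
        log.append('acquire ' + name)
        try:
            yield
        finally:
            log.append('release ' + name)

    log = []
    with fakelock('wlock', log), fakelock('lock', log):
        pass
    # log == ['acquire wlock', 'acquire lock', 'release lock', 'release wlock']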
3417 @command( | 3469 @command( |
3418 'debugupgraderepo', | 3470 b'debugupgraderepo', |
3419 [ | 3471 [ |
3420 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')), | 3472 ( |
3421 ('', 'run', False, _('performs an upgrade')), | 3473 b'o', |
3422 ('', 'backup', True, _('keep the old repository content around')), | 3474 b'optimize', |
3423 ('', 'changelog', None, _('select the changelog for upgrade')), | 3475 [], |
3424 ('', 'manifest', None, _('select the manifest for upgrade')), | 3476 _(b'extra optimization to perform'), |
3477 _(b'NAME'), | |
3478 ), | |
3479 (b'', b'run', False, _(b'performs an upgrade')), | |
3480 (b'', b'backup', True, _(b'keep the old repository content around')), | |
3481 (b'', b'changelog', None, _(b'select the changelog for upgrade')), | |
3482 (b'', b'manifest', None, _(b'select the manifest for upgrade')), | |
3425 ], | 3483 ], |
3426 ) | 3484 ) |
3427 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts): | 3485 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts): |
3428 """upgrade a repository to use different features | 3486 """upgrade a repository to use different features |
3429 | 3487 |
3455 ui, repo, run=run, optimize=optimize, backup=backup, **opts | 3513 ui, repo, run=run, optimize=optimize, backup=backup, **opts |
3456 ) | 3514 ) |
3457 | 3515 |
3458 | 3516 |
3459 @command( | 3517 @command( |
3460 'debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'), inferrepo=True | 3518 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True |
3461 ) | 3519 ) |
3462 def debugwalk(ui, repo, *pats, **opts): | 3520 def debugwalk(ui, repo, *pats, **opts): |
3463 """show how files match on given patterns""" | 3521 """show how files match on given patterns""" |
3464 opts = pycompat.byteskwargs(opts) | 3522 opts = pycompat.byteskwargs(opts) |
3465 m = scmutil.match(repo[None], pats, opts) | 3523 m = scmutil.match(repo[None], pats, opts) |
3466 if ui.verbose: | 3524 if ui.verbose: |
3467 ui.write('* matcher:\n', stringutil.prettyrepr(m), '\n') | 3525 ui.write(b'* matcher:\n', stringutil.prettyrepr(m), b'\n') |
3468 items = list(repo[None].walk(m)) | 3526 items = list(repo[None].walk(m)) |
3469 if not items: | 3527 if not items: |
3470 return | 3528 return |
3471 f = lambda fn: fn | 3529 f = lambda fn: fn |
3472 if ui.configbool('ui', 'slash') and pycompat.ossep != '/': | 3530 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/': |
3473 f = lambda fn: util.normpath(fn) | 3531 f = lambda fn: util.normpath(fn) |
3474 fmt = 'f %%-%ds %%-%ds %%s' % ( | 3532 fmt = b'f %%-%ds %%-%ds %%s' % ( |
3475 max([len(abs) for abs in items]), | 3533 max([len(abs) for abs in items]), |
3476 max([len(repo.pathto(abs)) for abs in items]), | 3534 max([len(repo.pathto(abs)) for abs in items]), |
3477 ) | 3535 ) |
3478 for abs in items: | 3536 for abs in items: |
3479 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '') | 3537 line = fmt % ( |
3480 ui.write("%s\n" % line.rstrip()) | 3538 abs, |
3481 | 3539 f(repo.pathto(abs)), |
3482 | 3540 m.exact(abs) and b'exact' or b'', |
3483 @command('debugwhyunstable', [], _('REV')) | 3541 ) |
3542 ui.write(b"%s\n" % line.rstrip()) | |
3543 | |
3544 | |
3545 @command(b'debugwhyunstable', [], _(b'REV')) | |
3484 def debugwhyunstable(ui, repo, rev): | 3546 def debugwhyunstable(ui, repo, rev): |
3485 """explain instabilities of a changeset""" | 3547 """explain instabilities of a changeset""" |
3486 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)): | 3548 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)): |
3487 dnodes = '' | 3549 dnodes = b'' |
3488 if entry.get('divergentnodes'): | 3550 if entry.get(b'divergentnodes'): |
3489 dnodes = ( | 3551 dnodes = ( |
3490 ' '.join( | 3552 b' '.join( |
3491 '%s (%s)' % (ctx.hex(), ctx.phasestr()) | 3553 b'%s (%s)' % (ctx.hex(), ctx.phasestr()) |
3492 for ctx in entry['divergentnodes'] | 3554 for ctx in entry[b'divergentnodes'] |
3493 ) | 3555 ) |
3494 + ' ' | 3556 + b' ' |
3495 ) | 3557 ) |
3496 ui.write( | 3558 ui.write( |
3497 '%s: %s%s %s\n' | 3559 b'%s: %s%s %s\n' |
3498 % (entry['instability'], dnodes, entry['reason'], entry['node']) | 3560 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node']) |
3499 ) | 3561 ) |
3500 | 3562 |
3501 | 3563 |
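Each entry rendered by debugwhyunstable above follows the '%s: %s%s %s' pattern: instability name, optional divergent nodes, reason, then the node. A rough illustration with made-up values:

    # Hypothetical entry formatted the same way as the loop above.
    entry = {
        'instability': 'orphan',
        'reason': 'obsolete parent',
        'node': 'a1b2c3d4e5f6',
    }
    line = '%s: %s%s %s' % (entry['instability'], '', entry['reason'], entry['node'])
    # -> 'orphan: obsolete parent a1b2c3d4e5f6'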
3502 @command( | 3564 @command( |
3503 'debugwireargs', | 3565 b'debugwireargs', |
3504 [ | 3566 [ |
3505 ('', 'three', '', 'three'), | 3567 (b'', b'three', b'', b'three'), |
3506 ('', 'four', '', 'four'), | 3568 (b'', b'four', b'', b'four'), |
3507 ('', 'five', '', 'five'), | 3569 (b'', b'five', b'', b'five'), |
3508 ] | 3570 ] |
3509 + cmdutil.remoteopts, | 3571 + cmdutil.remoteopts, |
3510 _('REPO [OPTIONS]... [ONE [TWO]]'), | 3572 _(b'REPO [OPTIONS]... [ONE [TWO]]'), |
3511 norepo=True, | 3573 norepo=True, |
3512 ) | 3574 ) |
3513 def debugwireargs(ui, repopath, *vals, **opts): | 3575 def debugwireargs(ui, repopath, *vals, **opts): |
3514 opts = pycompat.byteskwargs(opts) | 3576 opts = pycompat.byteskwargs(opts) |
3515 repo = hg.peer(ui, opts, repopath) | 3577 repo = hg.peer(ui, opts, repopath) |
3521 args[k] = v | 3583 args[k] = v |
3522 args = pycompat.strkwargs(args) | 3584 args = pycompat.strkwargs(args) |
3523 # run twice to check that we don't mess up the stream for the next command | 3585 # run twice to check that we don't mess up the stream for the next command |
3524 res1 = repo.debugwireargs(*vals, **args) | 3586 res1 = repo.debugwireargs(*vals, **args) |
3525 res2 = repo.debugwireargs(*vals, **args) | 3587 res2 = repo.debugwireargs(*vals, **args) |
3526 ui.write("%s\n" % res1) | 3588 ui.write(b"%s\n" % res1) |
3527 if res1 != res2: | 3589 if res1 != res2: |
3528 ui.warn("%s\n" % res2) | 3590 ui.warn(b"%s\n" % res2) |
3529 | 3591 |
3530 | 3592 |
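debugwireargs above converts keyword arguments at both boundaries: byteskwargs turns the str keys Python 3 delivers into bytes, and strkwargs turns them back before the **args call. A rough standalone sketch of the idea (the real helpers live in mercurial.pycompat; latin-1 is used here purely for illustration):

    def byteskwargs(opts):
        # str keys -> bytes keys, as used at the top of command functions.
        return {k.encode('latin-1'): v for k, v in opts.items()}

    def strkwargs(opts):
        # bytes keys -> str keys, so the dict can be splatted as **kwargs.
        return {k.decode('latin-1'): v for k, v in opts.items()}

    opts = strkwargs({b'three': b'3', b'four': b'4'})
    # opts == {'three': b'3', 'four': b'4'} and can now be passed as **opts.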
3531 def _parsewirelangblocks(fh): | 3593 def _parsewirelangblocks(fh): |
3532 activeaction = None | 3594 activeaction = None |
3533 blocklines = [] | 3595 blocklines = [] |
3552 continue | 3614 continue |
3553 | 3615 |
3554 # Else we start with an indent. | 3616 # Else we start with an indent. |
3555 | 3617 |
3556 if not activeaction: | 3618 if not activeaction: |
3557 raise error.Abort(_('indented line outside of block')) | 3619 raise error.Abort(_(b'indented line outside of block')) |
3558 | 3620 |
3559 indent = len(line) - len(line.lstrip()) | 3621 indent = len(line) - len(line.lstrip()) |
3560 | 3622 |
3561 # If this line is indented more than the last line, concatenate it. | 3623 # If this line is indented more than the last line, concatenate it. |
3562 if indent > lastindent and blocklines: | 3624 if indent > lastindent and blocklines: |
3569 if activeaction: | 3631 if activeaction: |
3570 yield activeaction, blocklines | 3632 yield activeaction, blocklines |
3571 | 3633 |
3572 | 3634 |
3573 @command( | 3635 @command( |
3574 'debugwireproto', | 3636 b'debugwireproto', |
3575 [ | 3637 [ |
3576 ('', 'localssh', False, _('start an SSH server for this repo')), | 3638 (b'', b'localssh', False, _(b'start an SSH server for this repo')), |
3577 ('', 'peer', '', _('construct a specific version of the peer')), | 3639 (b'', b'peer', b'', _(b'construct a specific version of the peer')), |
3578 ('', 'noreadstderr', False, _('do not read from stderr of the remote')), | |
3579 ( | 3640 ( |
3580 '', | 3641 b'', |
3581 'nologhandshake', | 3642 b'noreadstderr', |
3582 False, | 3643 False, |
3583 _('do not log I/O related to the peer handshake'), | 3644 _(b'do not read from stderr of the remote'), |
3645 ), | |
3646 ( | |
3647 b'', | |
3648 b'nologhandshake', | |
3649 False, | |
3650 _(b'do not log I/O related to the peer handshake'), | |
3584 ), | 3651 ), |
3585 ] | 3652 ] |
3586 + cmdutil.remoteopts, | 3653 + cmdutil.remoteopts, |
3587 _('[PATH]'), | 3654 _(b'[PATH]'), |
3588 optionalrepo=True, | 3655 optionalrepo=True, |
3589 ) | 3656 ) |
3590 def debugwireproto(ui, repo, path=None, **opts): | 3657 def debugwireproto(ui, repo, path=None, **opts): |
3591 """send wire protocol commands to a server | 3658 """send wire protocol commands to a server |
3592 | 3659 |
3768 resulting object is fed into a CBOR encoder. Otherwise it is interpreted | 3835 resulting object is fed into a CBOR encoder. Otherwise it is interpreted |
3769 as a Python byte string literal. | 3836 as a Python byte string literal. |
3770 """ | 3837 """ |
3771 opts = pycompat.byteskwargs(opts) | 3838 opts = pycompat.byteskwargs(opts) |
3772 | 3839 |
3773 if opts['localssh'] and not repo: | 3840 if opts[b'localssh'] and not repo: |
3774 raise error.Abort(_('--localssh requires a repository')) | 3841 raise error.Abort(_(b'--localssh requires a repository')) |
3775 | 3842 |
3776 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'): | 3843 if opts[b'peer'] and opts[b'peer'] not in ( |
3844 b'raw', | |
3845 b'http2', | |
3846 b'ssh1', | |
3847 b'ssh2', | |
3848 ): | |
3777 raise error.Abort( | 3849 raise error.Abort( |
3778 _('invalid value for --peer'), | 3850 _(b'invalid value for --peer'), |
3779 hint=_('valid values are "raw", "ssh1", and "ssh2"'), | 3851 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'), |
3780 ) | 3852 ) |
3781 | 3853 |
3782 if path and opts['localssh']: | 3854 if path and opts[b'localssh']: |
3783 raise error.Abort( | 3855 raise error.Abort( |
3784 _('cannot specify --localssh with an explicit ' 'path') | 3856 _(b'cannot specify --localssh with an explicit ' b'path') |
3785 ) | 3857 ) |
3786 | 3858 |
3787 if ui.interactive(): | 3859 if ui.interactive(): |
3788 ui.write(_('(waiting for commands on stdin)\n')) | 3860 ui.write(_(b'(waiting for commands on stdin)\n')) |
3789 | 3861 |
3790 blocks = list(_parsewirelangblocks(ui.fin)) | 3862 blocks = list(_parsewirelangblocks(ui.fin)) |
3791 | 3863 |
3792 proc = None | 3864 proc = None |
3793 stdin = None | 3865 stdin = None |
3794 stdout = None | 3866 stdout = None |
3795 stderr = None | 3867 stderr = None |
3796 opener = None | 3868 opener = None |
3797 | 3869 |
3798 if opts['localssh']: | 3870 if opts[b'localssh']: |
3799 # We start the SSH server in its own process so there is process | 3871 # We start the SSH server in its own process so there is process |
3800 # separation. This prevents a whole class of potential bugs around | 3872 # separation. This prevents a whole class of potential bugs around |
3801 # shared state from interfering with server operation. | 3873 # shared state from interfering with server operation. |
3802 args = procutil.hgcmd() + [ | 3874 args = procutil.hgcmd() + [ |
3803 '-R', | 3875 b'-R', |
3804 repo.root, | 3876 repo.root, |
3805 'debugserve', | 3877 b'debugserve', |
3806 '--sshstdio', | 3878 b'--sshstdio', |
3807 ] | 3879 ] |
3808 proc = subprocess.Popen( | 3880 proc = subprocess.Popen( |
3809 pycompat.rapply(procutil.tonativestr, args), | 3881 pycompat.rapply(procutil.tonativestr, args), |
3810 stdin=subprocess.PIPE, | 3882 stdin=subprocess.PIPE, |
3811 stdout=subprocess.PIPE, | 3883 stdout=subprocess.PIPE, |
3816 stdin = proc.stdin | 3888 stdin = proc.stdin |
3817 stdout = proc.stdout | 3889 stdout = proc.stdout |
3818 stderr = proc.stderr | 3890 stderr = proc.stderr |
3819 | 3891 |
3820 # We turn the pipes into observers so we can log I/O. | 3892 # We turn the pipes into observers so we can log I/O. |
3821 if ui.verbose or opts['peer'] == 'raw': | 3893 if ui.verbose or opts[b'peer'] == b'raw': |
3822 stdin = util.makeloggingfileobject( | 3894 stdin = util.makeloggingfileobject( |
3823 ui, proc.stdin, b'i', logdata=True | 3895 ui, proc.stdin, b'i', logdata=True |
3824 ) | 3896 ) |
3825 stdout = util.makeloggingfileobject( | 3897 stdout = util.makeloggingfileobject( |
3826 ui, proc.stdout, b'o', logdata=True | 3898 ui, proc.stdout, b'o', logdata=True |
3829 ui, proc.stderr, b'e', logdata=True | 3901 ui, proc.stderr, b'e', logdata=True |
3830 ) | 3902 ) |
3831 | 3903 |
3832 # --localssh also implies the peer connection settings. | 3904 # --localssh also implies the peer connection settings. |
3833 | 3905 |
3834 url = 'ssh://localserver' | 3906 url = b'ssh://localserver' |
3835 autoreadstderr = not opts['noreadstderr'] | 3907 autoreadstderr = not opts[b'noreadstderr'] |
3836 | 3908 |
3837 if opts['peer'] == 'ssh1': | 3909 if opts[b'peer'] == b'ssh1': |
3838 ui.write(_('creating ssh peer for wire protocol version 1\n')) | 3910 ui.write(_(b'creating ssh peer for wire protocol version 1\n')) |
3839 peer = sshpeer.sshv1peer( | 3911 peer = sshpeer.sshv1peer( |
3840 ui, | 3912 ui, |
3841 url, | 3913 url, |
3842 proc, | 3914 proc, |
3843 stdin, | 3915 stdin, |
3844 stdout, | 3916 stdout, |
3845 stderr, | 3917 stderr, |
3846 None, | 3918 None, |
3847 autoreadstderr=autoreadstderr, | 3919 autoreadstderr=autoreadstderr, |
3848 ) | 3920 ) |
3849 elif opts['peer'] == 'ssh2': | 3921 elif opts[b'peer'] == b'ssh2': |
3850 ui.write(_('creating ssh peer for wire protocol version 2\n')) | 3922 ui.write(_(b'creating ssh peer for wire protocol version 2\n')) |
3851 peer = sshpeer.sshv2peer( | 3923 peer = sshpeer.sshv2peer( |
3852 ui, | 3924 ui, |
3853 url, | 3925 url, |
3854 proc, | 3926 proc, |
3855 stdin, | 3927 stdin, |
3856 stdout, | 3928 stdout, |
3857 stderr, | 3929 stderr, |
3858 None, | 3930 None, |
3859 autoreadstderr=autoreadstderr, | 3931 autoreadstderr=autoreadstderr, |
3860 ) | 3932 ) |
3861 elif opts['peer'] == 'raw': | 3933 elif opts[b'peer'] == b'raw': |
3862 ui.write(_('using raw connection to peer\n')) | 3934 ui.write(_(b'using raw connection to peer\n')) |
3863 peer = None | 3935 peer = None |
3864 else: | 3936 else: |
3865 ui.write(_('creating ssh peer from handshake results\n')) | 3937 ui.write(_(b'creating ssh peer from handshake results\n')) |
3866 peer = sshpeer.makepeer( | 3938 peer = sshpeer.makepeer( |
3867 ui, | 3939 ui, |
3868 url, | 3940 url, |
3869 proc, | 3941 proc, |
3870 stdin, | 3942 stdin, |
3876 elif path: | 3948 elif path: |
3877 # We bypass hg.peer() so we can proxy the sockets. | 3949 # We bypass hg.peer() so we can proxy the sockets. |
3878 # TODO consider not doing this because we skip | 3950 # TODO consider not doing this because we skip |
3879 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality. | 3951 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality. |
3880 u = util.url(path) | 3952 u = util.url(path) |
3881 if u.scheme != 'http': | 3953 if u.scheme != b'http': |
3882 raise error.Abort(_('only http:// paths are currently supported')) | 3954 raise error.Abort(_(b'only http:// paths are currently supported')) |
3883 | 3955 |
3884 url, authinfo = u.authinfo() | 3956 url, authinfo = u.authinfo() |
3885 openerargs = { | 3957 openerargs = { |
3886 r'useragent': b'Mercurial debugwireproto', | 3958 r'useragent': b'Mercurial debugwireproto', |
3887 } | 3959 } |
3900 openerargs[r'loggingopts'][r'logdataapis'] = True | 3972 openerargs[r'loggingopts'][r'logdataapis'] = True |
3901 | 3973 |
3902 # Don't send default headers when in raw mode. This allows us to | 3974 # Don't send default headers when in raw mode. This allows us to |
3903 # bypass most of the behavior of our URL handling code so we can | 3975 # bypass most of the behavior of our URL handling code so we can |
3904 # have near complete control over what's sent on the wire. | 3976 # have near complete control over what's sent on the wire. |
3905 if opts['peer'] == 'raw': | 3977 if opts[b'peer'] == b'raw': |
3906 openerargs[r'sendaccept'] = False | 3978 openerargs[r'sendaccept'] = False |
3907 | 3979 |
3908 opener = urlmod.opener(ui, authinfo, **openerargs) | 3980 opener = urlmod.opener(ui, authinfo, **openerargs) |
3909 | 3981 |
3910 if opts['peer'] == 'http2': | 3982 if opts[b'peer'] == b'http2': |
3911 ui.write(_('creating http peer for wire protocol version 2\n')) | 3983 ui.write(_(b'creating http peer for wire protocol version 2\n')) |
3912 # We go through makepeer() because we need an API descriptor for | 3984 # We go through makepeer() because we need an API descriptor for |
3913 # the peer instance to be useful. | 3985 # the peer instance to be useful. |
3914 with ui.configoverride( | 3986 with ui.configoverride( |
3915 {('experimental', 'httppeer.advertise-v2'): True} | 3987 {(b'experimental', b'httppeer.advertise-v2'): True} |
3916 ): | 3988 ): |
3917 if opts['nologhandshake']: | 3989 if opts[b'nologhandshake']: |
3918 ui.pushbuffer() | 3990 ui.pushbuffer() |
3919 | 3991 |
3920 peer = httppeer.makepeer(ui, path, opener=opener) | 3992 peer = httppeer.makepeer(ui, path, opener=opener) |
3921 | 3993 |
3922 if opts['nologhandshake']: | 3994 if opts[b'nologhandshake']: |
3923 ui.popbuffer() | 3995 ui.popbuffer() |
3924 | 3996 |
3925 if not isinstance(peer, httppeer.httpv2peer): | 3997 if not isinstance(peer, httppeer.httpv2peer): |
3926 raise error.Abort( | 3998 raise error.Abort( |
3927 _( | 3999 _( |
3928 'could not instantiate HTTP peer for ' | 4000 b'could not instantiate HTTP peer for ' |
3929 'wire protocol version 2' | 4001 b'wire protocol version 2' |
3930 ), | 4002 ), |
3931 hint=_( | 4003 hint=_( |
3932 'the server may not have the feature ' | 4004 b'the server may not have the feature ' |
3933 'enabled or is not allowing this ' | 4005 b'enabled or is not allowing this ' |
3934 'client version' | 4006 b'client version' |
3935 ), | 4007 ), |
3936 ) | 4008 ) |
3937 | 4009 |
3938 elif opts['peer'] == 'raw': | 4010 elif opts[b'peer'] == b'raw': |
3939 ui.write(_('using raw connection to peer\n')) | 4011 ui.write(_(b'using raw connection to peer\n')) |
3940 peer = None | 4012 peer = None |
3941 elif opts['peer']: | 4013 elif opts[b'peer']: |
3942 raise error.Abort( | 4014 raise error.Abort( |
3943 _('--peer %s not supported with HTTP peers') % opts['peer'] | 4015 _(b'--peer %s not supported with HTTP peers') % opts[b'peer'] |
3944 ) | 4016 ) |
3945 else: | 4017 else: |
3946 peer = httppeer.makepeer(ui, path, opener=opener) | 4018 peer = httppeer.makepeer(ui, path, opener=opener) |
3947 | 4019 |
3948 # We /could/ populate stdin/stdout with sock.makefile()... | 4020 # We /could/ populate stdin/stdout with sock.makefile()... |
3949 else: | 4021 else: |
3950 raise error.Abort(_('unsupported connection configuration')) | 4022 raise error.Abort(_(b'unsupported connection configuration')) |
3951 | 4023 |
3952 batchedcommands = None | 4024 batchedcommands = None |
3953 | 4025 |
3954 # Now perform actions based on the parsed wire language instructions. | 4026 # Now perform actions based on the parsed wire language instructions. |
3955 for action, lines in blocks: | 4027 for action, lines in blocks: |
3956 if action in ('raw', 'raw+'): | 4028 if action in (b'raw', b'raw+'): |
3957 if not stdin: | 4029 if not stdin: |
3958 raise error.Abort(_('cannot call raw/raw+ on this peer')) | 4030 raise error.Abort(_(b'cannot call raw/raw+ on this peer')) |
3959 | 4031 |
3960 # Concatenate the data together. | 4032 # Concatenate the data together. |
3961 data = ''.join(l.lstrip() for l in lines) | 4033 data = b''.join(l.lstrip() for l in lines) |
3962 data = stringutil.unescapestr(data) | 4034 data = stringutil.unescapestr(data) |
3963 stdin.write(data) | 4035 stdin.write(data) |
3964 | 4036 |
3965 if action == 'raw+': | 4037 if action == b'raw+': |
3966 stdin.flush() | 4038 stdin.flush() |
3967 elif action == 'flush': | 4039 elif action == b'flush': |
3968 if not stdin: | 4040 if not stdin: |
3969 raise error.Abort(_('cannot call flush on this peer')) | 4041 raise error.Abort(_(b'cannot call flush on this peer')) |
3970 stdin.flush() | 4042 stdin.flush() |
3971 elif action.startswith('command'): | 4043 elif action.startswith(b'command'): |
3972 if not peer: | 4044 if not peer: |
3973 raise error.Abort( | 4045 raise error.Abort( |
3974 _( | 4046 _( |
3975 'cannot send commands unless peer instance ' | 4047 b'cannot send commands unless peer instance ' |
3976 'is available' | 4048 b'is available' |
3977 ) | 4049 ) |
3978 ) | 4050 ) |
3979 | 4051 |
3980 command = action.split(' ', 1)[1] | 4052 command = action.split(b' ', 1)[1] |
3981 | 4053 |
3982 args = {} | 4054 args = {} |
3983 for line in lines: | 4055 for line in lines: |
3984 # We need to allow empty values. | 4056 # We need to allow empty values. |
3985 fields = line.lstrip().split(' ', 1) | 4057 fields = line.lstrip().split(b' ', 1) |
3986 if len(fields) == 1: | 4058 if len(fields) == 1: |
3987 key = fields[0] | 4059 key = fields[0] |
3988 value = '' | 4060 value = b'' |
3989 else: | 4061 else: |
3990 key, value = fields | 4062 key, value = fields |
3991 | 4063 |
3992 if value.startswith('eval:'): | 4064 if value.startswith(b'eval:'): |
3993 value = stringutil.evalpythonliteral(value[5:]) | 4065 value = stringutil.evalpythonliteral(value[5:]) |
3994 else: | 4066 else: |
3995 value = stringutil.unescapestr(value) | 4067 value = stringutil.unescapestr(value) |
3996 | 4068 |
3997 args[key] = value | 4069 args[key] = value |
3998 | 4070 |
3999 if batchedcommands is not None: | 4071 if batchedcommands is not None: |
4000 batchedcommands.append((command, args)) | 4072 batchedcommands.append((command, args)) |
4001 continue | 4073 continue |
4002 | 4074 |
4003 ui.status(_('sending %s command\n') % command) | 4075 ui.status(_(b'sending %s command\n') % command) |
4004 | 4076 |
4005 if 'PUSHFILE' in args: | 4077 if b'PUSHFILE' in args: |
4006 with open(args['PUSHFILE'], r'rb') as fh: | 4078 with open(args[b'PUSHFILE'], r'rb') as fh: |
4007 del args['PUSHFILE'] | 4079 del args[b'PUSHFILE'] |
4008 res, output = peer._callpush( | 4080 res, output = peer._callpush( |
4009 command, fh, **pycompat.strkwargs(args) | 4081 command, fh, **pycompat.strkwargs(args) |
4010 ) | 4082 ) |
4011 ui.status(_('result: %s\n') % stringutil.escapestr(res)) | 4083 ui.status(_(b'result: %s\n') % stringutil.escapestr(res)) |
4012 ui.status( | 4084 ui.status( |
4013 _('remote output: %s\n') % stringutil.escapestr(output) | 4085 _(b'remote output: %s\n') % stringutil.escapestr(output) |
4014 ) | 4086 ) |
4015 else: | 4087 else: |
4016 with peer.commandexecutor() as e: | 4088 with peer.commandexecutor() as e: |
4017 res = e.callcommand(command, args).result() | 4089 res = e.callcommand(command, args).result() |
4018 | 4090 |
4019 if isinstance(res, wireprotov2peer.commandresponse): | 4091 if isinstance(res, wireprotov2peer.commandresponse): |
4020 val = res.objects() | 4092 val = res.objects() |
4021 ui.status( | 4093 ui.status( |
4022 _('response: %s\n') | 4094 _(b'response: %s\n') |
4023 % stringutil.pprint(val, bprefix=True, indent=2) | 4095 % stringutil.pprint(val, bprefix=True, indent=2) |
4024 ) | 4096 ) |
4025 else: | 4097 else: |
4026 ui.status( | 4098 ui.status( |
4027 _('response: %s\n') | 4099 _(b'response: %s\n') |
4028 % stringutil.pprint(res, bprefix=True, indent=2) | 4100 % stringutil.pprint(res, bprefix=True, indent=2) |
4029 ) | 4101 ) |
4030 | 4102 |
4031 elif action == 'batchbegin': | 4103 elif action == b'batchbegin': |
4032 if batchedcommands is not None: | 4104 if batchedcommands is not None: |
4033 raise error.Abort(_('nested batchbegin not allowed')) | 4105 raise error.Abort(_(b'nested batchbegin not allowed')) |
4034 | 4106 |
4035 batchedcommands = [] | 4107 batchedcommands = [] |
4036 elif action == 'batchsubmit': | 4108 elif action == b'batchsubmit': |
4037 # There is a batching API we could go through. But it would be | 4109 # There is a batching API we could go through. But it would be |
4038 # difficult to normalize requests into function calls. It is easier | 4110 # difficult to normalize requests into function calls. It is easier |
4039 # to bypass this layer and normalize to commands + args. | 4111 # to bypass this layer and normalize to commands + args. |
4040 ui.status( | 4112 ui.status( |
4041 _('sending batch with %d sub-commands\n') % len(batchedcommands) | 4113 _(b'sending batch with %d sub-commands\n') |
4114 % len(batchedcommands) | |
4042 ) | 4115 ) |
4043 for i, chunk in enumerate(peer._submitbatch(batchedcommands)): | 4116 for i, chunk in enumerate(peer._submitbatch(batchedcommands)): |
4044 ui.status( | 4117 ui.status( |
4045 _('response #%d: %s\n') % (i, stringutil.escapestr(chunk)) | 4118 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk)) |
4046 ) | 4119 ) |
4047 | 4120 |
4048 batchedcommands = None | 4121 batchedcommands = None |
4049 | 4122 |
4050 elif action.startswith('httprequest '): | 4123 elif action.startswith(b'httprequest '): |
4051 if not opener: | 4124 if not opener: |
4052 raise error.Abort( | 4125 raise error.Abort( |
4053 _('cannot use httprequest without an HTTP ' 'peer') | 4126 _(b'cannot use httprequest without an HTTP ' b'peer') |
4054 ) | 4127 ) |
4055 | 4128 |
4056 request = action.split(' ', 2) | 4129 request = action.split(b' ', 2) |
4057 if len(request) != 3: | 4130 if len(request) != 3: |
4058 raise error.Abort( | 4131 raise error.Abort( |
4059 _( | 4132 _( |
4060 'invalid httprequest: expected format is ' | 4133 b'invalid httprequest: expected format is ' |
4061 '"httprequest <method> <path>' | 4134 b'"httprequest <method> <path>' |
4062 ) | 4135 ) |
4063 ) | 4136 ) |
4064 | 4137 |
4065 method, httppath = request[1:] | 4138 method, httppath = request[1:] |
4066 headers = {} | 4139 headers = {} |
4075 value = pycompat.strurl(m.group(2)) | 4148 value = pycompat.strurl(m.group(2)) |
4076 headers[key] = value | 4149 headers[key] = value |
4077 continue | 4150 continue |
4078 | 4151 |
4079 if line.startswith(b'BODYFILE '): | 4152 if line.startswith(b'BODYFILE '): |
4080 with open(line.split(b' ', 1), 'rb') as fh: | 4153 with open(line.split(b' ', 1), b'rb') as fh: |
4081 body = fh.read() | 4154 body = fh.read() |
4082 elif line.startswith(b'frame '): | 4155 elif line.startswith(b'frame '): |
4083 frame = wireprotoframing.makeframefromhumanstring( | 4156 frame = wireprotoframing.makeframefromhumanstring( |
4084 line[len(b'frame ') :] | 4157 line[len(b'frame ') :] |
4085 ) | 4158 ) |
4086 | 4159 |
4087 frames.append(frame) | 4160 frames.append(frame) |
4088 else: | 4161 else: |
4089 raise error.Abort( | 4162 raise error.Abort( |
4090 _('unknown argument to httprequest: %s') % line | 4163 _(b'unknown argument to httprequest: %s') % line |
4091 ) | 4164 ) |
4092 | 4165 |
4093 url = path + httppath | 4166 url = path + httppath |
4094 | 4167 |
4095 if frames: | 4168 if frames: |
4111 continue | 4184 continue |
4112 | 4185 |
4113 ct = res.headers.get(r'Content-Type') | 4186 ct = res.headers.get(r'Content-Type') |
4114 if ct == r'application/mercurial-cbor': | 4187 if ct == r'application/mercurial-cbor': |
4115 ui.write( | 4188 ui.write( |
4116 _('cbor> %s\n') | 4189 _(b'cbor> %s\n') |
4117 % stringutil.pprint( | 4190 % stringutil.pprint( |
4118 cborutil.decodeall(body), bprefix=True, indent=2 | 4191 cborutil.decodeall(body), bprefix=True, indent=2 |
4119 ) | 4192 ) |
4120 ) | 4193 ) |
4121 | 4194 |
4122 elif action == 'close': | 4195 elif action == b'close': |
4123 peer.close() | 4196 peer.close() |
4124 elif action == 'readavailable': | 4197 elif action == b'readavailable': |
4125 if not stdout or not stderr: | 4198 if not stdout or not stderr: |
4126 raise error.Abort(_('readavailable not available on this peer')) | 4199 raise error.Abort( |
4200 _(b'readavailable not available on this peer') | |
4201 ) | |
4127 | 4202 |
4128 stdin.close() | 4203 stdin.close() |
4129 stdout.read() | 4204 stdout.read() |
4130 stderr.read() | 4205 stderr.read() |
4131 | 4206 |
4132 elif action == 'readline': | 4207 elif action == b'readline': |
4133 if not stdout: | 4208 if not stdout: |
4134 raise error.Abort(_('readline not available on this peer')) | 4209 raise error.Abort(_(b'readline not available on this peer')) |
4135 stdout.readline() | 4210 stdout.readline() |
4136 elif action == 'ereadline': | 4211 elif action == b'ereadline': |
4137 if not stderr: | 4212 if not stderr: |
4138 raise error.Abort(_('ereadline not available on this peer')) | 4213 raise error.Abort(_(b'ereadline not available on this peer')) |
4139 stderr.readline() | 4214 stderr.readline() |
4140 elif action.startswith('read '): | 4215 elif action.startswith(b'read '): |
4141 count = int(action.split(' ', 1)[1]) | 4216 count = int(action.split(b' ', 1)[1]) |
4142 if not stdout: | 4217 if not stdout: |
4143 raise error.Abort(_('read not available on this peer')) | 4218 raise error.Abort(_(b'read not available on this peer')) |
4144 stdout.read(count) | 4219 stdout.read(count) |
4145 elif action.startswith('eread '): | 4220 elif action.startswith(b'eread '): |
4146 count = int(action.split(' ', 1)[1]) | 4221 count = int(action.split(b' ', 1)[1]) |
4147 if not stderr: | 4222 if not stderr: |
4148 raise error.Abort(_('eread not available on this peer')) | 4223 raise error.Abort(_(b'eread not available on this peer')) |
4149 stderr.read(count) | 4224 stderr.read(count) |
4150 else: | 4225 else: |
4151 raise error.Abort(_('unknown action: %s') % action) | 4226 raise error.Abort(_(b'unknown action: %s') % action) |
4152 | 4227 |
4153 if batchedcommands is not None: | 4228 if batchedcommands is not None: |
4154 raise error.Abort(_('unclosed "batchbegin" request')) | 4229 raise error.Abort(_(b'unclosed "batchbegin" request')) |
4155 | 4230 |
4156 if peer: | 4231 if peer: |
4157 peer.close() | 4232 peer.close() |
4158 | 4233 |
4159 if proc: | 4234 if proc: |