Mercurial > public > mercurial-scm > hg
comparison hgext/phabricator.py @ 41897:c340a8ac7ef3
phabricator: convert conduit response JSON unicode to bytes inside callconduit
Previously the byte conversion was happening piecemeal in callers, and in the
case of createdifferentialrevision not at all, leading to UnicodeEncodeErrors
when trying to phabsend a commit with a description containing characters not
representable in ASCII. (issue6040)
Remove all the scattered encoding.unitolocal calls and perform it once, inside
callconduit, on the entire response dict recursively before returning it, in
keeping with the strategy of converting at the earliest opportunity. Convert all
keys used on returned object to bytes.
Modify the phabsend tests to test this by adding a non-ASCII character
(rendered here as "?" due to encoding loss) to the commit message of alpha.
Differential Revision: https://phab.mercurial-scm.org/D6044
author | Ian Moody <moz-ian@perix.co.uk> |
---|---|
date | Sat, 02 Mar 2019 18:48:23 +0000 |
parents | 570e62f1dcf2 |
children | 2bad8f92cebf |
comparison
equal
deleted
inserted
replaced
41896:94faa2e84094 | 41897:c340a8ac7ef3 |
---|---|
58 mdiff, | 58 mdiff, |
59 obsutil, | 59 obsutil, |
60 parser, | 60 parser, |
61 patch, | 61 patch, |
62 phases, | 62 phases, |
63 pycompat, | |
63 registrar, | 64 registrar, |
64 scmutil, | 65 scmutil, |
65 smartset, | 66 smartset, |
66 tags, | 67 tags, |
67 templateutil, | 68 templateutil, |
217 urlopener = urlmod.opener(repo.ui, authinfo) | 218 urlopener = urlmod.opener(repo.ui, authinfo) |
218 request = util.urlreq.request(url, data=data) | 219 request = util.urlreq.request(url, data=data) |
219 with contextlib.closing(urlopener.open(request)) as rsp: | 220 with contextlib.closing(urlopener.open(request)) as rsp: |
220 body = rsp.read() | 221 body = rsp.read() |
221 repo.ui.debug(b'Conduit Response: %s\n' % body) | 222 repo.ui.debug(b'Conduit Response: %s\n' % body) |
222 parsed = json.loads(body) | 223 parsed = pycompat.rapply( |
223 if parsed.get(r'error_code'): | 224 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode) |
225 else x, | |
226 json.loads(body) | |
227 ) | |
228 if parsed.get(b'error_code'): | |
224 msg = (_(b'Conduit Error (%s): %s') | 229 msg = (_(b'Conduit Error (%s): %s') |
225 % (parsed[r'error_code'], parsed[r'error_info'])) | 230 % (parsed[b'error_code'], parsed[b'error_info'])) |
226 raise error.Abort(msg) | 231 raise error.Abort(msg) |
227 return parsed[r'result'] | 232 return parsed[b'result'] |
228 | 233 |
229 @vcrcommand(b'debugcallconduit', [], _(b'METHOD')) | 234 @vcrcommand(b'debugcallconduit', [], _(b'METHOD')) |
230 def debugcallconduit(ui, repo, name): | 235 def debugcallconduit(ui, repo, name): |
231 """call Conduit API | 236 """call Conduit API |
232 | 237 |
247 callsign = repo.ui.config(b'phabricator', b'callsign') | 252 callsign = repo.ui.config(b'phabricator', b'callsign') |
248 if not callsign: | 253 if not callsign: |
249 return None | 254 return None |
250 query = callconduit(repo, b'diffusion.repository.search', | 255 query = callconduit(repo, b'diffusion.repository.search', |
251 {b'constraints': {b'callsigns': [callsign]}}) | 256 {b'constraints': {b'callsigns': [callsign]}}) |
252 if len(query[r'data']) == 0: | 257 if len(query[b'data']) == 0: |
253 return None | 258 return None |
254 repophid = encoding.strtolocal(query[r'data'][0][r'phid']) | 259 repophid = query[b'data'][0][b'phid'] |
255 repo.ui.setconfig(b'phabricator', b'repophid', repophid) | 260 repo.ui.setconfig(b'phabricator', b'repophid', repophid) |
256 return repophid | 261 return repophid |
257 | 262 |
258 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z') | 263 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z') |
259 _differentialrevisiondescre = re.compile( | 264 _differentialrevisiondescre = re.compile( |
303 # Phabricator, and expect precursors overlap with it. | 308 # Phabricator, and expect precursors overlap with it. |
304 if toconfirm: | 309 if toconfirm: |
305 drevs = [drev for force, precs, drev in toconfirm.values()] | 310 drevs = [drev for force, precs, drev in toconfirm.values()] |
306 alldiffs = callconduit(unfi, b'differential.querydiffs', | 311 alldiffs = callconduit(unfi, b'differential.querydiffs', |
307 {b'revisionIDs': drevs}) | 312 {b'revisionIDs': drevs}) |
308 getnode = lambda d: bin(encoding.unitolocal( | 313 getnode = lambda d: bin( |
309 getdiffmeta(d).get(r'node', b''))) or None | 314 getdiffmeta(d).get(b'node', b'')) or None |
310 for newnode, (force, precset, drev) in toconfirm.items(): | 315 for newnode, (force, precset, drev) in toconfirm.items(): |
311 diffs = [d for d in alldiffs.values() | 316 diffs = [d for d in alldiffs.values() |
312 if int(d[r'revisionID']) == drev] | 317 if int(d[b'revisionID']) == drev] |
313 | 318 |
314 # "precursors" as known by Phabricator | 319 # "precursors" as known by Phabricator |
315 phprecset = set(getnode(d) for d in diffs) | 320 phprecset = set(getnode(d) for d in diffs) |
316 | 321 |
317 # Ignore if precursors (Phabricator and local repo) do not overlap, | 322 # Ignore if precursors (Phabricator and local repo) do not overlap, |
326 | 331 |
327 # Find the last node using Phabricator metadata, and make sure it | 332 # Find the last node using Phabricator metadata, and make sure it |
328 # exists in the repo | 333 # exists in the repo |
329 oldnode = lastdiff = None | 334 oldnode = lastdiff = None |
330 if diffs: | 335 if diffs: |
331 lastdiff = max(diffs, key=lambda d: int(d[r'id'])) | 336 lastdiff = max(diffs, key=lambda d: int(d[b'id'])) |
332 oldnode = getnode(lastdiff) | 337 oldnode = getnode(lastdiff) |
333 if oldnode and oldnode not in nodemap: | 338 if oldnode and oldnode not in nodemap: |
334 oldnode = None | 339 oldnode = None |
335 | 340 |
336 result[newnode] = (oldnode, lastdiff, drev) | 341 result[newnode] = (oldnode, lastdiff, drev) |
359 return diff | 364 return diff |
360 | 365 |
361 def writediffproperties(ctx, diff): | 366 def writediffproperties(ctx, diff): |
362 """write metadata to diff so patches could be applied losslessly""" | 367 """write metadata to diff so patches could be applied losslessly""" |
363 params = { | 368 params = { |
364 b'diff_id': diff[r'id'], | 369 b'diff_id': diff[b'id'], |
365 b'name': b'hg:meta', | 370 b'name': b'hg:meta', |
366 b'data': json.dumps({ | 371 b'data': json.dumps({ |
367 b'user': ctx.user(), | 372 b'user': ctx.user(), |
368 b'date': b'%d %d' % ctx.date(), | 373 b'date': b'%d %d' % ctx.date(), |
369 b'node': ctx.hex(), | 374 b'node': ctx.hex(), |
371 }), | 376 }), |
372 } | 377 } |
373 callconduit(ctx.repo(), b'differential.setdiffproperty', params) | 378 callconduit(ctx.repo(), b'differential.setdiffproperty', params) |
374 | 379 |
375 params = { | 380 params = { |
376 b'diff_id': diff[r'id'], | 381 b'diff_id': diff[b'id'], |
377 b'name': b'local:commits', | 382 b'name': b'local:commits', |
378 b'data': json.dumps({ | 383 b'data': json.dumps({ |
379 ctx.hex(): { | 384 ctx.hex(): { |
380 b'author': stringutil.person(ctx.user()), | 385 b'author': stringutil.person(ctx.user()), |
381 b'authorEmail': stringutil.email(ctx.user()), | 386 b'authorEmail': stringutil.email(ctx.user()), |
406 neednewdiff = True | 411 neednewdiff = True |
407 | 412 |
408 transactions = [] | 413 transactions = [] |
409 if neednewdiff: | 414 if neednewdiff: |
410 diff = creatediff(ctx) | 415 diff = creatediff(ctx) |
411 transactions.append({b'type': b'update', b'value': diff[r'phid']}) | 416 transactions.append({b'type': b'update', b'value': diff[b'phid']}) |
412 else: | 417 else: |
413 # Even if we don't need to upload a new diff because the patch content | 418 # Even if we don't need to upload a new diff because the patch content |
414 # does not change. We might still need to update its metadata so | 419 # does not change. We might still need to update its metadata so |
415 # pushers could know the correct node metadata. | 420 # pushers could know the correct node metadata. |
416 assert olddiff | 421 assert olddiff |
431 | 436 |
432 # Parse commit message and update related fields. | 437 # Parse commit message and update related fields. |
433 desc = ctx.description() | 438 desc = ctx.description() |
434 info = callconduit(repo, b'differential.parsecommitmessage', | 439 info = callconduit(repo, b'differential.parsecommitmessage', |
435 {b'corpus': desc}) | 440 {b'corpus': desc}) |
436 for k, v in info[r'fields'].items(): | 441 for k, v in info[b'fields'].items(): |
437 if k in [b'title', b'summary', b'testPlan']: | 442 if k in [b'title', b'summary', b'testPlan']: |
438 transactions.append({b'type': k, b'value': v}) | 443 transactions.append({b'type': k, b'value': v}) |
439 | 444 |
440 params = {b'transactions': transactions} | 445 params = {b'transactions': transactions} |
441 if revid is not None: | 446 if revid is not None: |
453 names = [name.lower() for name in names] | 458 names = [name.lower() for name in names] |
454 query = {b'constraints': {b'usernames': names}} | 459 query = {b'constraints': {b'usernames': names}} |
455 result = callconduit(repo, b'user.search', query) | 460 result = callconduit(repo, b'user.search', query) |
456 # username not found is not an error of the API. So check if we have missed | 461 # username not found is not an error of the API. So check if we have missed |
457 # some names here. | 462 # some names here. |
458 data = result[r'data'] | 463 data = result[b'data'] |
459 resolved = set(entry[r'fields'][r'username'].lower() for entry in data) | 464 resolved = set(entry[b'fields'][b'username'].lower() for entry in data) |
460 unresolved = set(names) - resolved | 465 unresolved = set(names) - resolved |
461 if unresolved: | 466 if unresolved: |
462 raise error.Abort(_(b'unknown username: %s') | 467 raise error.Abort(_(b'unknown username: %s') |
463 % b' '.join(sorted(unresolved))) | 468 % b' '.join(sorted(unresolved))) |
464 return [entry[r'phid'] for entry in data] | 469 return [entry[b'phid'] for entry in data] |
465 | 470 |
466 @vcrcommand(b'phabsend', | 471 @vcrcommand(b'phabsend', |
467 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')), | 472 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')), |
468 (b'', b'amend', True, _(b'update commit messages')), | 473 (b'', b'amend', True, _(b'update commit messages')), |
469 (b'', b'reviewer', [], _(b'specify reviewers')), | 474 (b'', b'reviewer', [], _(b'specify reviewers')), |
536 if oldnode != ctx.node() or opts.get(b'amend'): | 541 if oldnode != ctx.node() or opts.get(b'amend'): |
537 # Create or update Differential Revision | 542 # Create or update Differential Revision |
538 revision, diff = createdifferentialrevision( | 543 revision, diff = createdifferentialrevision( |
539 ctx, revid, lastrevid, oldnode, olddiff, actions) | 544 ctx, revid, lastrevid, oldnode, olddiff, actions) |
540 diffmap[ctx.node()] = diff | 545 diffmap[ctx.node()] = diff |
541 newrevid = int(revision[r'object'][r'id']) | 546 newrevid = int(revision[b'object'][b'id']) |
542 if revid: | 547 if revid: |
543 action = b'updated' | 548 action = b'updated' |
544 else: | 549 else: |
545 action = b'created' | 550 action = b'created' |
546 | 551 |
578 wnode = unfi[b'.'].node() | 583 wnode = unfi[b'.'].node() |
579 mapping = {} # {oldnode: [newnode]} | 584 mapping = {} # {oldnode: [newnode]} |
580 for i, rev in enumerate(revs): | 585 for i, rev in enumerate(revs): |
581 old = unfi[rev] | 586 old = unfi[rev] |
582 drevid = drevids[i] | 587 drevid = drevids[i] |
583 drev = [d for d in drevs if int(d[r'id']) == drevid][0] | 588 drev = [d for d in drevs if int(d[b'id']) == drevid][0] |
584 newdesc = getdescfromdrev(drev) | 589 newdesc = getdescfromdrev(drev) |
585 newdesc = encoding.unitolocal(newdesc) | |
586 # Make sure commit message contain "Differential Revision" | 590 # Make sure commit message contain "Differential Revision" |
587 if old.description() != newdesc: | 591 if old.description() != newdesc: |
588 if old.phase() == phases.public: | 592 if old.phase() == phases.public: |
589 ui.warn(_("warning: not updating public commit %s\n") | 593 ui.warn(_("warning: not updating public commit %s\n") |
590 % scmutil.formatchangeid(old)) | 594 % scmutil.formatchangeid(old)) |
611 if wnode in mapping: | 615 if wnode in mapping: |
612 unfi.setparents(mapping[wnode][0]) | 616 unfi.setparents(mapping[wnode][0]) |
613 | 617 |
614 # Map from "hg:meta" keys to header understood by "hg import". The order is | 618 # Map from "hg:meta" keys to header understood by "hg import". The order is |
615 # consistent with "hg export" output. | 619 # consistent with "hg export" output. |
616 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'), | 620 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'), |
617 (r'node', b'Node ID'), (r'parent', b'Parent ')]) | 621 (b'node', b'Node ID'), (b'parent', b'Parent ')]) |
618 | 622 |
619 def _confirmbeforesend(repo, revs, oldmap): | 623 def _confirmbeforesend(repo, revs, oldmap): |
620 url, token = readurltoken(repo) | 624 url, token = readurltoken(repo) |
621 ui = repo.ui | 625 ui = repo.ui |
622 for rev in revs: | 626 for rev in revs: |
642 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed', | 646 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed', |
643 b'abandoned'} | 647 b'abandoned'} |
644 | 648 |
645 def _getstatusname(drev): | 649 def _getstatusname(drev): |
646 """get normalized status name from a Differential Revision""" | 650 """get normalized status name from a Differential Revision""" |
647 return drev[r'statusName'].replace(b' ', b'').lower() | 651 return drev[b'statusName'].replace(b' ', b'').lower() |
648 | 652 |
649 # Small language to specify differential revisions. Support symbols: (), :X, | 653 # Small language to specify differential revisions. Support symbols: (), :X, |
650 # +, and -. | 654 # +, and -. |
651 | 655 |
652 _elements = { | 656 _elements = { |
760 if key in prefetched: | 764 if key in prefetched: |
761 return prefetched[key] | 765 return prefetched[key] |
762 drevs = callconduit(repo, b'differential.query', params) | 766 drevs = callconduit(repo, b'differential.query', params) |
763 # Fill prefetched with the result | 767 # Fill prefetched with the result |
764 for drev in drevs: | 768 for drev in drevs: |
765 prefetched[drev[r'phid']] = drev | 769 prefetched[drev[b'phid']] = drev |
766 prefetched[int(drev[r'id'])] = drev | 770 prefetched[int(drev[b'id'])] = drev |
767 if key not in prefetched: | 771 if key not in prefetched: |
768 raise error.Abort(_(b'cannot get Differential Revision %r') | 772 raise error.Abort(_(b'cannot get Differential Revision %r') |
769 % params) | 773 % params) |
770 return prefetched[key] | 774 return prefetched[key] |
771 | 775 |
775 result = [] | 779 result = [] |
776 queue = [{r'ids': [i]} for i in topdrevids] | 780 queue = [{r'ids': [i]} for i in topdrevids] |
777 while queue: | 781 while queue: |
778 params = queue.pop() | 782 params = queue.pop() |
779 drev = fetch(params) | 783 drev = fetch(params) |
780 if drev[r'id'] in visited: | 784 if drev[b'id'] in visited: |
781 continue | 785 continue |
782 visited.add(drev[r'id']) | 786 visited.add(drev[b'id']) |
783 result.append(int(drev[r'id'])) | 787 result.append(int(drev[b'id'])) |
784 auxiliary = drev.get(r'auxiliary', {}) | 788 auxiliary = drev.get(b'auxiliary', {}) |
785 depends = auxiliary.get(r'phabricator:depends-on', []) | 789 depends = auxiliary.get(b'phabricator:depends-on', []) |
786 for phid in depends: | 790 for phid in depends: |
787 queue.append({b'phids': [phid]}) | 791 queue.append({b'phids': [phid]}) |
788 result.reverse() | 792 result.reverse() |
789 return smartset.baseset(result) | 793 return smartset.baseset(result) |
790 | 794 |
800 # Prefetch Differential Revisions in batch | 804 # Prefetch Differential Revisions in batch |
801 tofetch = set(drevs) | 805 tofetch = set(drevs) |
802 for r in ancestordrevs: | 806 for r in ancestordrevs: |
803 tofetch.update(range(max(1, r - batchsize), r + 1)) | 807 tofetch.update(range(max(1, r - batchsize), r + 1)) |
804 if drevs: | 808 if drevs: |
805 fetch({r'ids': list(tofetch)}) | 809 fetch({b'ids': list(tofetch)}) |
806 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs)) | 810 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs)) |
807 | 811 |
808 # Walk through the tree, return smartsets | 812 # Walk through the tree, return smartsets |
809 def walk(tree): | 813 def walk(tree): |
810 op = tree[0] | 814 op = tree[0] |
834 """get description (commit message) from "Differential Revision" | 838 """get description (commit message) from "Differential Revision" |
835 | 839 |
836 This is similar to differential.getcommitmessage API. But we only care | 840 This is similar to differential.getcommitmessage API. But we only care |
837 about limited fields: title, summary, test plan, and URL. | 841 about limited fields: title, summary, test plan, and URL. |
838 """ | 842 """ |
839 title = drev[r'title'] | 843 title = drev[b'title'] |
840 summary = drev[r'summary'].rstrip() | 844 summary = drev[b'summary'].rstrip() |
841 testplan = drev[r'testPlan'].rstrip() | 845 testplan = drev[b'testPlan'].rstrip() |
842 if testplan: | 846 if testplan: |
843 testplan = b'Test Plan:\n%s' % testplan | 847 testplan = b'Test Plan:\n%s' % testplan |
844 uri = b'Differential Revision: %s' % drev[r'uri'] | 848 uri = b'Differential Revision: %s' % drev[b'uri'] |
845 return b'\n\n'.join(filter(None, [title, summary, testplan, uri])) | 849 return b'\n\n'.join(filter(None, [title, summary, testplan, uri])) |
846 | 850 |
847 def getdiffmeta(diff): | 851 def getdiffmeta(diff): |
848 """get commit metadata (date, node, user, p1) from a diff object | 852 """get commit metadata (date, node, user, p1) from a diff object |
849 | 853 |
879 } | 883 } |
880 | 884 |
881 Note: metadata extracted from "local:commits" will lose time zone | 885 Note: metadata extracted from "local:commits" will lose time zone |
882 information. | 886 information. |
883 """ | 887 """ |
884 props = diff.get(r'properties') or {} | 888 props = diff.get(b'properties') or {} |
885 meta = props.get(r'hg:meta') | 889 meta = props.get(b'hg:meta') |
886 if not meta and props.get(r'local:commits'): | 890 if not meta and props.get(b'local:commits'): |
887 commit = sorted(props[r'local:commits'].values())[0] | 891 commit = sorted(props[b'local:commits'].values())[0] |
888 meta = { | 892 meta = { |
889 r'date': r'%d 0' % commit[r'time'], | 893 b'date': b'%d 0' % commit[b'time'], |
890 r'node': commit[r'rev'], | 894 b'node': commit[b'rev'], |
891 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']), | 895 b'user': b'%s <%s>' % (commit[b'author'], commit[b'authorEmail']), |
892 } | 896 } |
893 if len(commit.get(r'parents', ())) >= 1: | 897 if len(commit.get(b'parents', ())) >= 1: |
894 meta[r'parent'] = commit[r'parents'][0] | 898 meta[b'parent'] = commit[b'parents'][0] |
895 return meta or {} | 899 return meta or {} |
896 | 900 |
897 def readpatch(repo, drevs, write): | 901 def readpatch(repo, drevs, write): |
898 """generate plain-text patch readable by 'hg import' | 902 """generate plain-text patch readable by 'hg import' |
899 | 903 |
900 write is usually ui.write. drevs is what "querydrev" returns, results of | 904 write is usually ui.write. drevs is what "querydrev" returns, results of |
901 "differential.query". | 905 "differential.query". |
902 """ | 906 """ |
903 # Prefetch hg:meta property for all diffs | 907 # Prefetch hg:meta property for all diffs |
904 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs)) | 908 diffids = sorted(set(max(int(v) for v in drev[b'diffs']) for drev in drevs)) |
905 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids}) | 909 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids}) |
906 | 910 |
907 # Generate patch for each drev | 911 # Generate patch for each drev |
908 for drev in drevs: | 912 for drev in drevs: |
909 repo.ui.note(_(b'reading D%s\n') % drev[r'id']) | 913 repo.ui.note(_(b'reading D%s\n') % drev[b'id']) |
910 | 914 |
911 diffid = max(int(v) for v in drev[r'diffs']) | 915 diffid = max(int(v) for v in drev[b'diffs']) |
912 body = callconduit(repo, b'differential.getrawdiff', | 916 body = callconduit(repo, b'differential.getrawdiff', |
913 {b'diffID': diffid}) | 917 {b'diffID': diffid}) |
914 desc = getdescfromdrev(drev) | 918 desc = getdescfromdrev(drev) |
915 header = b'# HG changeset patch\n' | 919 header = b'# HG changeset patch\n' |
916 | 920 |
921 for k in _metanamemap.keys(): | 925 for k in _metanamemap.keys(): |
922 if k in meta: | 926 if k in meta: |
923 header += b'# %s %s\n' % (_metanamemap[k], meta[k]) | 927 header += b'# %s %s\n' % (_metanamemap[k], meta[k]) |
924 | 928 |
925 content = b'%s%s\n%s' % (header, desc, body) | 929 content = b'%s%s\n%s' % (header, desc, body) |
926 write(encoding.unitolocal(content)) | 930 write(content) |
927 | 931 |
928 @vcrcommand(b'phabread', | 932 @vcrcommand(b'phabread', |
929 [(b'', b'stack', False, _(b'read dependencies'))], | 933 [(b'', b'stack', False, _(b'read dependencies'))], |
930 _(b'DREVSPEC [OPTIONS]'), | 934 _(b'DREVSPEC [OPTIONS]'), |
931 helpcategory=command.CATEGORY_IMPORT_EXPORT) | 935 helpcategory=command.CATEGORY_IMPORT_EXPORT) |
977 drevs = querydrev(repo, spec) | 981 drevs = querydrev(repo, spec) |
978 for i, drev in enumerate(drevs): | 982 for i, drev in enumerate(drevs): |
979 if i + 1 == len(drevs) and opts.get(b'comment'): | 983 if i + 1 == len(drevs) and opts.get(b'comment'): |
980 actions.append({b'type': b'comment', b'value': opts[b'comment']}) | 984 actions.append({b'type': b'comment', b'value': opts[b'comment']}) |
981 if actions: | 985 if actions: |
982 params = {b'objectIdentifier': drev[r'phid'], | 986 params = {b'objectIdentifier': drev[b'phid'], |
983 b'transactions': actions} | 987 b'transactions': actions} |
984 callconduit(repo, b'differential.revision.edit', params) | 988 callconduit(repo, b'differential.revision.edit', params) |
985 | 989 |
986 templatekeyword = registrar.templatekeyword() | 990 templatekeyword = registrar.templatekeyword() |
987 | 991 |