comparison mercurial/debugcommands.py @ 36560:097ad1079192

debugcommands: support for sending "batch" requests

Let's teach `hg debugwireproto` to send "batch" requests.

The easiest way to implement this was as a pair of instructions to begin
and end a batched operation. Otherwise, we would have to reinvent the
parsing wheel or factor out the parsing code.

To prove it works, we add a batched request to test-ssh-proto.t.

Differential Revision: https://phab.mercurial-scm.org/D2408
author Gregory Szorc <gregory.szorc@gmail.com>
date Fri, 23 Feb 2018 12:50:59 -0800
parents bde0bd50f368
children 5faeabb07cf5
comparing 36559:bde0bd50f368 with 36560:097ad1079192
@@ -2627,10 +2627,25 @@
         namespace bookmarks
 
     Values are interpreted as Python b'' literals. This allows encoding
     special byte sequences via backslash escaping.
 
+    batchbegin
+    ----------
+
+    Instruct the peer to begin a batched send.
+
+    All ``command`` blocks are queued for execution until the next
+    ``batchsubmit`` block.
+
+    batchsubmit
+    -----------
+
+    Submit previously queued ``command`` blocks as a batch request.
+
+    This action MUST be paired with a ``batchbegin`` action.
+
     close
     -----
 
     Close the connection to the server.
 
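For illustration, an input script fed to `hg debugwireproto` that exercises the two new actions might look like the sketch below. It follows the ``command`` block syntax described in this docstring; the choice of `listkeys` with a `namespace` argument is only an assumed example, and the exact batched request added to test-ssh-proto.t is not reproduced in this comparison.

    batchbegin
    command listkeys
        namespace namespaces
    command listkeys
        namespace bookmarks
    batchsubmit

Both ``command`` blocks are queued and sent as a single batch request when ``batchsubmit`` is reached.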
@@ -2714,10 +2729,12 @@
         peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr)
 
     else:
         raise error.Abort(_('only --localssh is currently supported'))
 
+    batchedcommands = None
+
     # Now perform actions based on the parsed wire language instructions.
     for action, lines in blocks:
         if action in ('raw', 'raw+'):
             # Concatenate the data together.
             data = ''.join(l.lstrip() for l in lines)
@@ -2745,14 +2762,33 @@
                 else:
                     key, value = fields
 
                 args[key] = util.unescapestr(value)
 
+            if batchedcommands is not None:
+                batchedcommands.append((command, args))
+                continue
+
             ui.status(_('sending %s command\n') % command)
             res = peer._call(command, **args)
             ui.status(_('response: %s\n') % util.escapedata(res))
 
+        elif action == 'batchbegin':
+            if batchedcommands is not None:
+                raise error.Abort(_('nested batchbegin not allowed'))
+
+            batchedcommands = []
+        elif action == 'batchsubmit':
+            # There is a batching API we could go through. But it would be
+            # difficult to normalize requests into function calls. It is easier
+            # to bypass this layer and normalize to commands + args.
+            ui.status(_('sending batch with %d sub-commands\n') %
+                      len(batchedcommands))
+            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
+                ui.status(_('response #%d: %s\n') % (i, util.escapedata(chunk)))
+
+            batchedcommands = None
         elif action == 'close':
             peer.close()
         elif action == 'readavailable':
             fds = util.poll([stdout.fileno(), stderr.fileno()])
 
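`peer._submitbatch()` belongs to the existing peer code and is not part of this diff; the loop above only relies on it accepting the queued `(command, args)` pairs and yielding one response chunk per sub-command. As a rough, assumption-laden sketch of the "normalize to commands + args" idea from the comment, the wire protocol's ``batch`` command conventionally joins sub-commands with `;` and their arguments with `,`; the hypothetical helper below illustrates only that shape and omits the argument escaping the real code performs.

    def encodebatch(commands):
        """Illustrative sketch: fold (command, args) pairs into one payload.

        The real wire protocol code escapes ':', ',', ';' and '=' in keys
        and values before joining; that detail is omitted here for brevity.
        """
        chunks = []
        for op, args in commands:
            argstr = ','.join('%s=%s' % (k, v) for k, v in sorted(args.items()))
            chunks.append('%s %s' % (op, argstr))
        return ';'.join(chunks)

    # encodebatch([('listkeys', {'namespace': 'bookmarks'})])
    # -> 'listkeys namespace=bookmarks'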
@@ -2763,10 +2799,13 @@
         elif action == 'readline':
             stdout.readline()
         else:
             raise error.Abort(_('unknown action: %s') % action)
 
+    if batchedcommands is not None:
+        raise error.Abort(_('unclosed "batchbegin" request'))
+
     if peer:
         peer.close()
 
     if proc:
         proc.kill()
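Viewed on its own, the batching support added here is a small state machine around `batchedcommands`: `None` means each ``command`` block is sent immediately, a list means blocks are queued until ``batchsubmit``. A minimal standalone distillation of that control flow, with a hypothetical `send()` callable standing in for the peer calls, might look like:

    def runactions(blocks, send):
        # send() is a stand-in for peer._call / peer._submitbatch.
        batched = None  # None = send immediately, list = queue for a batch
        results = []
        for action, payload in blocks:
            if action == 'command':
                if batched is not None:
                    batched.append(payload)
                    continue
                results.append(send([payload]))
            elif action == 'batchbegin':
                if batched is not None:
                    raise RuntimeError('nested batchbegin not allowed')
                batched = []
            elif action == 'batchsubmit':
                if batched is None:
                    raise RuntimeError('batchsubmit without batchbegin')
                results.append(send(batched))
                batched = None
            else:
                raise RuntimeError('unknown action: %s' % action)
        if batched is not None:
            raise RuntimeError('unclosed "batchbegin" request')
        return results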