contrib/automation/hgautomation/linux.py
       
# linux.py - Linux specific automation functionality
#
# Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# no-check-code because Python 3 native.

import os
import pathlib
import shlex
import subprocess
import tempfile

from .ssh import (
    exec_command,
)


# Linux distributions that are supported.
DISTROS = {
    'debian9',
    'ubuntu18.04',
    'ubuntu18.10',
    'ubuntu19.04',
}
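# Illustrative validation a caller might perform (hypothetical; not used in
# this module):
#
#     if distro not in DISTROS:
#         raise ValueError('unsupported distro: %s' % distro)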
       

INSTALL_PYTHONS = r'''
PYENV2_VERSIONS="2.7.16 pypy2.7-7.1.1"
PYENV3_VERSIONS="3.5.7 3.6.8 3.7.3 3.8-dev pypy3.5-7.0.0 pypy3.6-7.1.1"

git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
pushd /hgdev/pyenv
git checkout 3faeda67bb33e07750d1a104271369a7384ca45c
popd

export PYENV_ROOT="/hgdev/pyenv"
export PATH="$PYENV_ROOT/bin:$PATH"

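# Each download below is pinned to a SHA-256 digest; sha256sum --check exits
# non-zero on a mismatch, which aborts the bootstrap because the generated
# script runs under "set -e".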
       
# pip 19.0.3.
PIP_SHA256=efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61
wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py
echo "${PIP_SHA256} get-pip.py" | sha256sum --check -

VIRTUALENV_SHA256=984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39
VIRTUALENV_TARBALL=virtualenv-16.4.3.tar.gz
wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/${VIRTUALENV_TARBALL}
echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -

for v in ${PYENV2_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
done

for v in ${PYENV3_VERSIONS}; do
    pyenv install -v ${v}
    ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
    ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt
done

pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
'''.lstrip().replace('\r\n', '\n')


BOOTSTRAP_VIRTUALENV = r'''
/usr/bin/virtualenv /hgdev/venv-bootstrap

HG_SHA256=1bdd21bb87d1e05fb5cd395d488d0e0cc2f2f90ce0fd248e31a03595da5ccb47
HG_TARBALL=mercurial-4.9.1.tar.gz

wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL}
echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -

/hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
'''.lstrip().replace('\r\n', '\n')


BOOTSTRAP_DEBIAN = r'''
#!/bin/bash

set -ex

DISTRO=`grep DISTRIB_ID /etc/lsb-release  | awk -F= '{{print $2}}'`
DEBIAN_VERSION=`cat /etc/debian_version`
LSB_RELEASE=`lsb_release -cs`

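# Create the hg user and group that own the development environment; the user
# is placed in the docker group and granted passwordless sudo below.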
       
sudo /usr/sbin/groupadd hg
sudo /usr/sbin/groupadd docker
sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg
sudo mkdir /home/hg/.ssh
sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys
sudo chown -R hg:hg /home/hg/.ssh
sudo chmod 700 /home/hg/.ssh
sudo chmod 600 /home/hg/.ssh/authorized_keys

cat << EOF | sudo tee /etc/sudoers.d/90-hg
hg ALL=(ALL) NOPASSWD:ALL
EOF

sudo apt-get update
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade

# Install packages necessary to set up Docker Apt repo.
sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \
    apt-transport-https \
    gnupg

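# Write Docker's APT repository signing key to a local file so it can be
# registered with apt-key below.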
       
cat > docker-apt-key << EOF
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth
lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh
38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq
L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7
UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N
cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht
ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo
vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD
G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ
XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj
q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB
tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3
BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO
v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd
tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk
jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m
6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P
XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc
FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8
g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm
ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh
9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5
G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW
FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB
EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF
M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx
Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu
w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk
z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8
eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb
VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa
1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X
zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ
pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7
ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ
BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY
1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp
YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI
mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES
KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7
JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ
cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0
6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5
U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z
VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f
irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk
SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz
QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W
9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw
24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe
dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y
Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR
H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh
/nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ
M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S
xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O
jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG
YT90qFF93M3v01BbxP+EIY2/9tiIPbrd
=0YYh
-----END PGP PUBLIC KEY BLOCK-----
EOF

sudo apt-key add docker-apt-key

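# Add APT sources: on Debian 9.8, stretch-backports (for clang-format-6.0),
# deb-src entries, and the Docker repository; on Ubuntu, just the Docker
# repository.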
       
if [ "$DEBIAN_VERSION" = "9.8" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
# Need backports for clang-format-6.0
deb http://deb.debian.org/debian stretch-backports main

# Sources are useful if we want to compile things locally.
deb-src http://deb.debian.org/debian stretch main
deb-src http://security.debian.org/debian-security stretch/updates main
deb-src http://deb.debian.org/debian stretch-updates main
deb-src http://deb.debian.org/debian stretch-backports main

deb [arch=amd64] https://download.docker.com/linux/debian stretch stable
EOF

elif [ "$DISTRO" = "Ubuntu" ]; then
cat << EOF | sudo tee -a /etc/apt/sources.list
deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable
EOF

fi

sudo apt-get update

PACKAGES="\
    btrfs-progs \
    build-essential \
    bzr \
    clang-format-6.0 \
    cvs \
    darcs \
    debhelper \
    devscripts \
    dpkg-dev \
    dstat \
    emacs \
    gettext \
    git \
    htop \
    iotop \
    jfsutils \
    libbz2-dev \
    libexpat1-dev \
    libffi-dev \
    libgdbm-dev \
    liblzma-dev \
    libncurses5-dev \
    libnss3-dev \
    libreadline-dev \
    libsqlite3-dev \
    libssl-dev \
    netbase \
    ntfs-3g \
    nvme-cli \
    pyflakes \
    pyflakes3 \
    pylint \
    pylint3 \
    python-all-dev \
    python-dev \
    python-docutils \
    python-fuzzywuzzy \
    python-pygments \
    python-subversion \
    python-vcr \
    python3-dev \
    python3-docutils \
    python3-fuzzywuzzy \
    python3-pygments \
    python3-vcr \
    rsync \
    sqlite3 \
    subversion \
    tcl-dev \
    tk-dev \
    tla \
    unzip \
    uuid-dev \
    vim \
    virtualenv \
    wget \
    xfsprogs \
    zip \
    zlib1g-dev"

if [ "$DEBIAN_VERSION" = "9.8" ]; then
    PACKAGES="$PACKAGES linux-perf"
elif [ "$DISTRO" = "Ubuntu" ]; then
    PACKAGES="$PACKAGES linux-tools-common"
fi

# Ubuntu 19.04 removes monotone.
if [ "$LSB_RELEASE" != "disco" ]; then
    PACKAGES="$PACKAGES monotone"
fi

# As of April 27, 2019, Docker hasn't published packages for
# Ubuntu 19.04 yet.
if [ "$LSB_RELEASE" != "disco" ]; then
    PACKAGES="$PACKAGES docker-ce"
fi

sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES

# Create clang-format symlink so test harness finds it.
sudo update-alternatives --install /usr/bin/clang-format clang-format \
    /usr/bin/clang-format-6.0 1000

sudo mkdir /hgdev
# Will be normalized to hg:hg later.
sudo chown `whoami` /hgdev

cp requirements-py2.txt /hgdev/requirements-py2.txt
cp requirements-py3.txt /hgdev/requirements-py3.txt

# Disable the pip version check because it uses the network and can
# be annoying.
cat << EOF | sudo tee -a /etc/pip.conf
[global]
disable-pip-version-check = True
EOF

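# The INSTALL_PYTHONS and BOOTSTRAP_VIRTUALENV scripts are interpolated below
# when BOOTSTRAP_DEBIAN is rendered via str.format().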
       
{install_pythons}
{bootstrap_virtualenv}

/hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src

# Mark the repo as non-publishing.
cat >> /hgdev/src/.hg/hgrc << EOF
[phases]
publish = false
EOF

sudo chown -R hg:hg /hgdev
'''.lstrip().format(
    install_pythons=INSTALL_PYTHONS,
    bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
).replace('\r\n', '\n')


# Prepares /hgdev for operations.
PREPARE_HGDEV = '''
#!/bin/bash

set -e

FS=$1

ensure_device() {
    if [ -z "${DEVICE}" ]; then
        echo "could not find block device to format"
        exit 1
    fi
}

# Determine device to partition for extra filesystem.
# If only 1 volume is present, it will be the root volume and
# should be /dev/nvme0. If multiple volumes are present, the
# root volume could be nvme0 or nvme1. Use whichever one doesn't have
# a partition.
if [ -e /dev/nvme1n1 ]; then
    if [ -e /dev/nvme0n1p1 ]; then
        DEVICE=/dev/nvme1n1
    else
        DEVICE=/dev/nvme0n1
    fi
else
    DEVICE=
fi

sudo mkdir /hgwork

if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then
    ensure_device
    echo "creating ${FS} filesystem on ${DEVICE}"
fi

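# Create and mount the requested filesystem on the scratch device. "default"
# leaves /hgwork on the root volume; "tmpfs" does not need a block device.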
       
if [ "${FS}" = "default" ]; then
    :

elif [ "${FS}" = "btrfs" ]; then
    sudo mkfs.btrfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext3" ]; then
    # lazy_journal_init speeds up filesystem creation at the expense of
    # integrity if things crash. We are an ephemeral instance, so we don't
    # care about integrity.
    sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "ext4" ]; then
    sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "jfs" ]; then
    sudo mkfs.jfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

elif [ "${FS}" = "tmpfs" ]; then
    echo "creating tmpfs volume in /hgwork"
    sudo mount -t tmpfs -o size=1024M tmpfs /hgwork

elif [ "${FS}" = "xfs" ]; then
    sudo mkfs.xfs ${DEVICE}
    sudo mount ${DEVICE} /hgwork

else
    echo "unsupported filesystem: ${FS}"
    exit 1
fi

echo "/hgwork ready"

sudo chown hg:hg /hgwork
mkdir /hgwork/tmp
chown hg:hg /hgwork/tmp

rsync -a /hgdev/src /hgwork/
'''.lstrip().replace('\r\n', '\n')


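# Runs on the instance to purge the remote clone and update it to the
# revision passed as $1.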
       
HG_UPDATE_CLEAN = '''
set -ex

HG=/hgdev/venv-bootstrap/bin/hg

cd /hgwork/src
${HG} --config extensions.purge= purge --all
${HG} update -C $1
${HG} log -r .
'''.lstrip().replace('\r\n', '\n')


def prepare_exec_environment(ssh_client, filesystem='default'):
    """Prepare an EC2 instance to execute things.

    The AMI has an ``/hgdev`` directory bootstrapped with various Python
    installs and a clone of the Mercurial repo.

    In EC2, EBS volumes launched from snapshots have wonky performance
    behavior. Notably, blocks have to be copied on first access, which makes
    volume I/O extremely slow on fresh volumes.

    Furthermore, we may want to run operations, tests, etc. on alternative
    filesystems so we can examine behavior on different filesystems.

    This function is used to facilitate executing operations on alternate
    volumes.
    """
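    # ``filesystem`` mirrors the branches in PREPARE_HGDEV: default, btrfs,
    # ext3, ext4, jfs, tmpfs, or xfs. Illustrative call (not executed here):
    #
    #     prepare_exec_environment(ssh_client, filesystem='xfs')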
       
    sftp = ssh_client.open_sftp()

    with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh:
        fh.write(PREPARE_HGDEV)
        fh.chmod(0o0777)

    command = 'sudo /hgdev/prepare-hgdev %s' % filesystem
    chan, stdin, stdout = exec_command(ssh_client, command)
    stdin.close()

    for line in stdout:
        print(line, end='')

    res = chan.recv_exit_status()

    if res:
        raise Exception('non-0 exit code preparing exec environment; %d'
                        % res)

       

def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
    """Synchronize a local Mercurial source path to a remote EC2 instance."""

    with tempfile.TemporaryDirectory() as temp_dir:
        temp_dir = pathlib.Path(temp_dir)

        ssh_dir = temp_dir / '.ssh'
        ssh_dir.mkdir()
        ssh_dir.chmod(0o0700)

        public_ip = ec2_instance.public_ip_address

        ssh_config = ssh_dir / 'config'

        with ssh_config.open('w', encoding='utf-8') as fh:
            fh.write('Host %s\n' % public_ip)
            fh.write('  User hg\n')
            fh.write('  StrictHostKeyChecking no\n')
            fh.write('  UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts'))
            fh.write('  IdentityFile %s\n' % ec2_instance.ssh_private_key_path)

        if not (source_path / '.hg').is_dir():
            raise Exception('%s is not a Mercurial repository; synchronization '
                            'not yet supported' % source_path)

        env = dict(os.environ)
        env['HGPLAIN'] = '1'
        env['HGENCODING'] = 'utf-8'

        hg_bin = source_path / 'hg'

        res = subprocess.run(
            ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
            cwd=str(source_path), env=env, check=True, capture_output=True)

        full_revision = res.stdout.decode('ascii')

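        # Push the resolved revision over SSH using the config written above.
        # The double slash in the URL makes the remote path absolute
        # (/hgwork/src).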
       
        args = [
            'python2.7', str(hg_bin),
            '--config', 'ui.ssh=ssh -F %s' % ssh_config,
            '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
            'push', '-f', '-r', full_revision,
            'ssh://%s//hgwork/src' % public_ip,
        ]

        subprocess.run(args, cwd=str(source_path), env=env, check=True)

        # TODO support synchronizing dirty working directory.

        sftp = ec2_instance.ssh_client.open_sftp()

        with sftp.open('/hgdev/hgup', 'wb') as fh:
            fh.write(HG_UPDATE_CLEAN)
            fh.chmod(0o0700)

        chan, stdin, stdout = exec_command(
            ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
        stdin.close()

        for line in stdout:
            print(line, end='')

        res = chan.recv_exit_status()

        if res:
            raise Exception('non-0 exit code updating working directory; %d'
                            % res)


def run_tests(ssh_client, python_version, test_flags=None):
    """Run tests on a remote Linux machine via an SSH client."""
    test_flags = test_flags or []

    print('running tests')

    if python_version == 'system2':
        python = '/usr/bin/python2'
    elif python_version == 'system3':
        python = '/usr/bin/python3'
    elif python_version.startswith('pypy'):
        python = '/hgdev/pyenv/shims/%s' % python_version
    else:
        python = '/hgdev/pyenv/shims/python%s' % python_version

    test_flags = ' '.join(shlex.quote(a) for a in test_flags)

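    # run-tests.py is executed from the tests directory of the synchronized
    # clone, with TMPDIR pointing at the scratch space created by
    # PREPARE_HGDEV.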
       
    command = (
        '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
        'cd /hgwork/src/tests && %s run-tests.py %s"' % (
            python, test_flags))

    chan, stdin, stdout = exec_command(ssh_client, command)

    stdin.close()

    for line in stdout:
        print(line, end='')

    return chan.recv_exit_status()
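

# Illustrative end-to-end flow (hypothetical; the real orchestration lives in
# the calling automation code):
#
#     prepare_exec_environment(instance.ssh_client, filesystem='default')
#     synchronize_hg(pathlib.Path('/path/to/hg-src'), instance, '.')
#     run_tests(instance.ssh_client, '3.7', ['test-http.t'])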