view mercurial/archival.py @ 33499:0407a51b9d8c
codemod: register core configitems using a script
This is done by a script [3] using RedBaron [1], a tool designed for doing
code refactoring. All "default" values are decided by the script and are
strongly consistent with the existing code.
Two changes were made manually to fix tests, because RedBaron is not
confident about how to indent things [2]:
[warn] mercurial/exchange.py: experimental.bundle2-output-capture: default needs manual removal
[warn] mercurial/localrepo.py: experimental.hook-track-tags: default needs manual removal
(The shape of the entries the script generates is shown after the script below.)
[1]: https://github.com/PyCQA/redbaron
[2]: https://github.com/PyCQA/redbaron/issues/100
[3]:
#!/usr/bin/env python
# codemod_configitems.py - codemod tool to fill configitems
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import, print_function

import os
import sys

import redbaron

def readpath(path):
    with open(path) as f:
        return f.read()

def writepath(path, content):
    with open(path, 'w') as f:
        f.write(content)

_configmethods = {'config', 'configbool', 'configint', 'configbytes',
                  'configlist', 'configdate'}

def extractstring(rnode):
    """get the string from a RedBaron string or call_argument node"""
    while rnode.type != 'string':
        rnode = rnode.value
    return rnode.value[1:-1]  # unquote, "'str'" -> "str"

def uiconfigitems(red):
    """match *.ui.config* pattern, yield (node, method, args, section, name)"""
    for node in red.find_all('atomtrailers'):
        entry = None
        try:
            obj = node[-3].value
            method = node[-2].value
            args = node[-1]
            section = args[0].value
            name = args[1].value
            if (obj in ('ui', 'self') and method in _configmethods
                and section.type == 'string' and name.type == 'string'):
                entry = (node, method, args, extractstring(section),
                         extractstring(name))
        except Exception:
            pass
        else:
            if entry:
                yield entry

def coreconfigitems(red):
    """match coreconfigitem(...) pattern, yield (node, args, section, name)"""
    for node in red.find_all('atomtrailers'):
        entry = None
        try:
            args = node[1]
            section = args[0].value
            name = args[1].value
            if (node[0].value == 'coreconfigitem' and section.type == 'string'
                and name.type == 'string'):
                entry = (node, args, extractstring(section),
                         extractstring(name))
        except Exception:
            pass
        else:
            if entry:
                yield entry

def registercoreconfig(cfgred, section, name, defaultrepr):
    """insert coreconfigitem to cfgred AST

    section and name are plain string, defaultrepr is a string
    """
    # find a place to insert the "coreconfigitem" item
    entries = list(coreconfigitems(cfgred))
    for node, args, nodesection, nodename in reversed(entries):
        if (nodesection, nodename) < (section, name):
            # insert after this entry
            node.insert_after(
                'coreconfigitem(%r, %r,\n'
                '    default=%s,\n'
                ')' % (section, name, defaultrepr))
            return

def main(argv):
    if not argv:
        print('Usage: codemod_configitems.py FILES\n'
              'For example, FILES could be "{hgext,mercurial}/*/**.py"')
    dirname = os.path.dirname
    reporoot = dirname(dirname(dirname(os.path.abspath(__file__))))

    # register configitems to this destination
    cfgpath = os.path.join(reporoot, 'mercurial', 'configitems.py')
    cfgred = redbaron.RedBaron(readpath(cfgpath))

    # state about what to do
    registered = set((s, n) for n, a, s, n in coreconfigitems(cfgred))
    toregister = {}  # {(section, name): defaultrepr}
    coreconfigs = set()  # {(section, name)}, whether it's used in core

    # first loop: scan all files before taking any action
    for i, path in enumerate(argv):
        print('(%d/%d) scanning %s' % (i + 1, len(argv), path))
        iscore = ('mercurial' in path) and ('hgext' not in path)
        red = redbaron.RedBaron(readpath(path))
        # find all repo.ui.config* and ui.config* calls, and collect their
        # section, name and default value information.
        for node, method, args, section, name in uiconfigitems(red):
            if section == 'web':
                # [web] section has some weirdness, ignore them for now
                continue
            defaultrepr = None
            key = (section, name)
            if len(args) == 2:
                if key in registered:
                    continue
                if method == 'configlist':
                    defaultrepr = 'list'
                elif method == 'configbool':
                    defaultrepr = 'False'
                else:
                    defaultrepr = 'None'
            elif len(args) >= 3 and (args[2].target is None or
                                     args[2].target.value == 'default'):
                # try to understand the "default" value
                dnode = args[2].value
                if dnode.type == 'name':
                    if dnode.value in {'None', 'True', 'False'}:
                        defaultrepr = dnode.value
                elif dnode.type == 'string':
                    defaultrepr = repr(dnode.value[1:-1])
                elif dnode.type in ('int', 'float'):
                    defaultrepr = dnode.value
            # inconsistent default
            if key in toregister and toregister[key] != defaultrepr:
                defaultrepr = None
            # interesting to rewrite
            if key not in registered:
                if defaultrepr is None:
                    print('[note] %s: %s.%s: unsupported default'
                          % (path, section, name))
                    registered.add(key)  # skip checking it again
                else:
                    toregister[key] = defaultrepr
                    if iscore:
                        coreconfigs.add(key)

    # second loop: rewrite files given "toregister" result
    for path in argv:
        # reconstruct redbaron - trade CPU for memory
        red = redbaron.RedBaron(readpath(path))
        changed = False
        for node, method, args, section, name in uiconfigitems(red):
            key = (section, name)
            defaultrepr = toregister.get(key)
            if defaultrepr is None or key not in coreconfigs:
                continue
            if len(args) >= 3 and (args[2].target is None or
                                   args[2].target.value == 'default'):
                try:
                    del args[2]
                    changed = True
                except Exception:
                    # redbaron fails to do the rewrite due to indentation
                    # see https://github.com/PyCQA/redbaron/issues/100
                    print('[warn] %s: %s.%s: default needs manual removal'
                          % (path, section, name))
            if key not in registered:
                print('registering %s.%s' % (section, name))
                registercoreconfig(cfgred, section, name, defaultrepr)
                registered.add(key)
        if changed:
            print('updating %s' % path)
            writepath(path, red.dumps())

    if toregister:
        print('updating configitems.py')
        writepath(cfgpath, cfgred.dumps())

if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
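
For reference, the script above inserts entries of the following shape into
mercurial/configitems.py, next to the existing coreconfigitem() calls (the
section, name, and default shown here are only an illustration, not
necessarily a pair the script registered):

coreconfigitem('ui', 'quiet',
    default=False,
)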
author    Jun Wu <quark@fb.com>
date      Fri, 14 Jul 2017 14:22:40 -0700
parents   3047167733dc
children  4c4e95cae33a
line source
# archival.py - revision archival for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import gzip
import os
import struct
import tarfile
import time
import zipfile
import zlib

from .i18n import _

from . import (
    cmdutil,
    encoding,
    error,
    match as matchmod,
    util,
    vfs as vfsmod,
)
stringio = util.stringio

# from unzip source code:
_UNX_IFREG = 0x8000
_UNX_IFLNK = 0xa000

def tidyprefix(dest, kind, prefix):
    '''choose prefix to use for names in archive.  make sure prefix is
    safe for consumers.'''

    if prefix:
        prefix = util.normpath(prefix)
    else:
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        prefix = os.path.basename(dest)
        lower = prefix.lower()
        for sfx in exts.get(kind, []):
            if lower.endswith(sfx):
                prefix = prefix[:-len(sfx)]
                break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    # Drop the leading '.' path component if present, so Windows can read the
    # zip files (issue4634)
    if prefix.startswith('./'):
        prefix = prefix[2:]
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise error.Abort(_('archive prefix contains illegal components'))
    return prefix

exts = {
    'tar': ['.tar'],
    'tbz2': ['.tbz2', '.tar.bz2'],
    'tgz': ['.tgz', '.tar.gz'],
    'zip': ['.zip'],
}

def guesskind(dest):
    for kind, extensions in exts.iteritems():
        if any(dest.endswith(ext) for ext in extensions):
            return kind
    return None

def _rootctx(repo):
    # repo[0] may be hidden
    for rev in repo:
        return repo[rev]
    return repo['null']

def buildmetadata(ctx):
    '''build content of .hg_archival.txt'''
    repo = ctx.repo()
    hex = ctx.hex()
    if ctx.rev() is None:
        hex = ctx.p1().hex()
        if ctx.dirty(missing=True):
            hex += '+'

    base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
        _rootctx(repo).hex(), hex, encoding.fromlocal(ctx.branch()))

    tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                   if repo.tagtype(t) == 'global')
    if not tags:
        repo.ui.pushbuffer()
        opts = {'template': '{latesttag}\n{latesttagdistance}\n'
                            '{changessincelatesttag}',
                'style': '', 'patch': None, 'git': None}
        cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
        ltags, dist, changessince = repo.ui.popbuffer().split('\n')
        ltags = ltags.split(':')
        tags = ''.join('latesttag: %s\n' % t for t in ltags)
        tags += 'latesttagdistance: %s\n' % dist
        tags += 'changessincelatesttag: %s\n' % changessince

    return base + tags

class tarit(object):
    '''write archive to tar file or stream.  can write uncompressed,
    or compress with gzip or bzip2.'''

    class GzipFileWithTime(gzip.GzipFile):

        def __init__(self, *args, **kw):
            timestamp = None
            if 'timestamp' in kw:
                timestamp = kw.pop('timestamp')
            if timestamp is None:
                self.timestamp = time.time()
            else:
                self.timestamp = timestamp
            gzip.GzipFile.__init__(self, *args, **kw)

        def _write_gzip_header(self):
            self.fileobj.write('\037\213')             # magic header
            self.fileobj.write('\010')                 # compression method
            fname = self.name
            if fname and fname.endswith('.gz'):
                fname = fname[:-3]
            flags = 0
            if fname:
                flags = gzip.FNAME
            self.fileobj.write(chr(flags))
            gzip.write32u(self.fileobj, long(self.timestamp))
            self.fileobj.write('\002')
            self.fileobj.write('\377')
            if fname:
                self.fileobj.write(fname + '\000')

    def __init__(self, dest, mtime, kind=''):
        self.mtime = mtime
        self.fileobj = None

        def taropen(mode, name='', fileobj=None):
            if kind == 'gz':
                mode = mode[0]
                if not fileobj:
                    fileobj = open(name, mode + 'b')
                gzfileobj = self.GzipFileWithTime(name, mode + 'b',
                                                  zlib.Z_BEST_COMPRESSION,
                                                  fileobj, timestamp=mtime)
                self.fileobj = gzfileobj
                return tarfile.TarFile.taropen(name, mode, gzfileobj)
            else:
                return tarfile.open(name, mode + kind, fileobj)

        if isinstance(dest, str):
            self.z = taropen('w:', name=dest)
        else:
            self.z = taropen('w|', fileobj=dest)

    def addfile(self, name, mode, islink, data):
        i = tarfile.TarInfo(name)
        i.mtime = self.mtime
        i.size = len(data)
        if islink:
            i.type = tarfile.SYMTYPE
            i.mode = 0o777
            i.linkname = data
            data = None
            i.size = 0
        else:
            i.mode = mode
            data = stringio(data)
        self.z.addfile(i, data)

    def done(self):
        self.z.close()
        if self.fileobj:
            self.fileobj.close()

class tellable(object):
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        return getattr(self.fp, key)

    def write(self, s):
        self.fp.write(s)
        self.offset += len(s)

    def tell(self):
        return self.offset

class zipit(object):
    '''write archive to zip file or stream.  can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, mtime, compress=True):
        if not isinstance(dest, str):
            try:
                dest.tell()
            except (AttributeError, IOError):
                dest = tellable(dest)
        self.z = zipfile.ZipFile(dest, 'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)

        # Python's zipfile module emits deprecation warnings if we try
        # to store files with a date before 1980.
        epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
        if mtime < epoch:
            mtime = epoch

        self.mtime = mtime
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, islink, data):
        i = zipfile.ZipInfo(name, self.date_time)
        i.compress_type = self.z.compression
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        ftype = _UNX_IFREG
        if islink:
            mode = 0o777
            ftype = _UNX_IFLNK
        i.external_attr = (mode | ftype) << 16
        # add "extended-timestamp" extra block, because zip archives
        # without this will be extracted with unexpected timestamp,
        # if TZ is not configured as GMT
        i.extra += struct.pack('<hhBl',
                               0x5455,     # block type: "extended-timestamp"
                               1 + 4,      # size of this block
                               1,          # "modification time is present"
                               int(self.mtime)) # last modification (UTC)
        self.z.writestr(i, data)

    def done(self):
        self.z.close()

class fileit(object):
    '''write archive as files in directory.'''

    def __init__(self, name, mtime):
        self.basedir = name
        self.opener = vfsmod.vfs(self.basedir)

    def addfile(self, name, mode, islink, data):
        if islink:
            self.opener.symlink(data, name)
            return
        f = self.opener(name, "w", atomictemp=True)
        f.write(data)
        f.close()
        destfile = os.path.join(self.basedir, name)
        os.chmod(destfile, mode)

    def done(self):
        pass

archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
    'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
    'uzip': lambda name, mtime: zipit(name, mtime, False),
    'zip': zipit,
}

def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix='', mtime=None, subrepos=False):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.'''

    if kind == 'files':
        if prefix:
            raise error.Abort(_('cannot give prefix when archiving to files'))
    else:
        prefix = tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    if kind not in archivers:
        raise error.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]
    archiver = archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta"):
        name = '.hg_archival.txt'
        if not matchfn or matchfn(name):
            write(name, 0o644, False, lambda: buildmetadata(ctx))

    if matchfn:
        files = [f for f in ctx.manifest().keys() if matchfn(f)]
    else:
        files = ctx.manifest().keys()
    total = len(files)
    if total:
        files.sort()
        repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
        for i, f in enumerate(files):
            ff = ctx.flags(f)
            write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
            repo.ui.progress(_('archiving'), i + 1, item=f,
                             unit=_('files'), total=total)
        repo.ui.progress(_('archiving'), None)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.workingsub(subpath)
            submatch = matchmod.subdirmatcher(subpath, matchfn)
            total += sub.archive(archiver, prefix, submatch, decode)

    if total == 0:
        raise error.Abort(_('no files match the archive pattern'))

    archiver.done()
    return total
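
For context, a minimal sketch of driving this module directly through the
Python API of this Mercurial era; the repository path, output filename, and
revision below are placeholder assumptions, not anything defined in
archival.py itself:

from mercurial import archival, hg, ui as uimod

ui = uimod.ui.load()                        # ui with user/system config loaded
repo = hg.repository(ui, '/path/to/repo')   # assumed local repository
# Write a gzipped tarball of the tip revision; every member is prefixed
# with "project-tip/" inside the archive.
archival.archive(repo, 'project-tip.tar.gz', 'tip', 'tgz',
                 prefix='project-tip/')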