annotate hgext/largefiles/lfcommands.py @ 15173:3d27a8ff895f
largefiles: mark a string for translation

author:   Matt Mackall <mpm@selenic.com>
date:     Thu, 29 Sep 2011 17:16:42 -0500
parents:  fb1dcd2aae2a
children: 7c604d8c7e83
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command functions: lfadd() et al., plus the cmdtable.'''

import os
import shutil

from mercurial import util, match as match_, hg, node, context, error
from mercurial.i18n import _

import lfutil
import basestore

# -- Commands ----------------------------------------------------------

def lfconvert(ui, src, dest, *pats, **opts):
    '''Convert a normal repository to a largefiles repository

    Convert the source repository, creating an identical repository except
    that all files that match the given patterns or are over the given size
    will be added as largefiles. The size used to decide whether to track a
    file as a largefile is the size of the first version of the file. After
    running this command, make sure that largefiles is enabled anywhere you
    intend to push the new repository.'''

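    # A hedged usage sketch (paths and the 10 MB threshold are illustrative
    # only; the option and argument syntax comes from the cmdtable at the
    # bottom of this module):
    #
    #   hg lfconvert --size 10 /repos/project /repos/project-lf
    #   hg lfconvert --tonormal /repos/project-lf /repos/project-normal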
    if opts['tonormal']:
        tolfile = False
    else:
        tolfile = True
        size = opts['size']
        if not size:
            size = ui.config(lfutil.longname, 'size', default=None)
            try:
                size = int(size)
            except ValueError:
                raise util.Abort(_('largefiles.size must be integer, was %s\n')
                                 % size)
            except TypeError:
                raise util.Abort(_('size must be specified'))

    try:
        rsrc = hg.repository(ui, src)
        if not rsrc.local():
            raise util.Abort(_('%s is not a local Mercurial repo') % src)
    except error.RepoError, err:
        ui.traceback()
        raise util.Abort(err.args[0])
    if os.path.exists(dest):
        if not os.path.isdir(dest):
            raise util.Abort(_('destination %s already exists') % dest)
        elif os.listdir(dest):
            raise util.Abort(_('destination %s is not empty') % dest)
    try:
        ui.status(_('initializing destination %s\n') % dest)
        rdst = hg.repository(ui, dest, create=True)
        if not rdst.local():
            raise util.Abort(_('%s is not a local Mercurial repo') % dest)
    except error.RepoError:
        ui.traceback()
        raise util.Abort(_('%s is not a repo') % dest)

    success = False
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dst_lock = rdst.lock()

        # Get a list of all changesets in the source. The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.config(lfutil.longname, 'patterns', default=())
                if pats:
                    pats = pats.split(' ')
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
        dst_lock.release()

def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]
            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the lfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('Renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # lfile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                if 'l' in ctx.filectx(f).flags():
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # lfile was modified, update standins
                fullpath = rdst.wjoin(f)
                lfutil.createdir(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    # initialize fd so the finally clause below cannot hit a
                    # NameError if open() itself fails
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a lfile because lfileness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcfname], 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _islfile(file, ctx, matcher, size):
    '''
    A file is a lfile if it matches a pattern or is over
    the given size.
    '''
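    # Illustrative only: with size=10 and no matching pattern, a 25 MB file
    # would be tracked as a largefile while a 2 MB file would not, since the
    # threshold below is size * 1024 * 1024 bytes.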
    # Never store .hgtags, .hgignore, or .hgsigs as lfiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False

def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    # Don't upload locally. All largefiles are in the system wide cache
    # so the other repo can just get them from there.
    if not files or rdst.local():
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    files = filter(lambda h: not store.exists(h), files)
    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('Missing largefile %s needs to be uploaded')
                             % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)

def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
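    # Note (descriptive only): with all=False just the working directory
    # parent ('.') is checked; contents=True additionally asks the store's
    # verify() to check each file's SHA-1, not merely its presence.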
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
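    # Illustrative call (file names and results are hypothetical):
    #   cached, missing = cachelfiles(ui, repo, '.')
    #   # e.g. (['sub/big.bin'], []) if one largefile had to be downloaded
    #   # and was found in a store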
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        if (not os.path.exists(repo.wjoin(lfile)) \
            or expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and \
            not lfutil.findfile(repo, expectedhash):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])

def updatelfiles(ui, repo, filelist=None, printmessage=True):
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
            cachelfiles(ui, repo, '.')

        updated, removed = 0, 0
        for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
            # increment the appropriate counter according to _updatelfile's
            # return value
            updated += i > 0 and i or 0
            removed -= i < 0 and i or 0
            if printmessage and (removed or updated) and not printed:
                ui.status(_('getting changed largefiles\n'))
                printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()

def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        if os.path.exists(absstandin + '.orig'):
            shutil.copyfile(abslfile, abslfile + '.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if expecthash != '' and \
                (not os.path.exists(abslfile) or \
                expecthash != lfutil.hashfile(abslfile)):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                return None # don't try to set the mode or update the dirstate
            ret = 1
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        if os.path.exists(abslfile):
            os.unlink(abslfile)
            ret = -1
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        try:
            # Mercurial >= 1.9
            lfdirstate.drop(lfile)
        except AttributeError:
            # Mercurial <= 1.8
            lfdirstate.forget(lfile)
    return ret

# -- hg commands declarations ------------------------------------------------


cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', 0, 'All files over this size (in megabytes) '
                    'will be considered largefiles. This can also be specified '
                    'in your hgrc as [largefiles].size.'),
                   ('', 'tonormal', False,
                    'Convert from a largefiles repo to a normal repo')],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }