annotate hgext/largefiles/lfcommands.py @ 15339:be1377d19018 stable

largefiles: test lfconvert error handling; remove redundant code

author    Greg Ward <greg@gerg.ca>
date      Sat, 22 Oct 2011 14:17:19 -0400
parents   0db47b8d025f
children  0e58513cc59a
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command function for lfconvert, plus the cmdtable.'''

import os
import shutil

from mercurial import util, match as match_, hg, node, context, error
from mercurial.i18n import _

import lfutil
import basestore

# -- Commands ----------------------------------------------------------

def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''
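    # Example invocations (illustrative; repository names are placeholders,
    # the options are those declared in cmdtable at the bottom of this module):
    #   hg lfconvert --size 10 oldrepo newrepo        # size threshold in MB
    #   hg lfconvert --to-normal bigrepo plainrepo    # convert back to normal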

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
    rsrc = hg.repository(ui, src)
    if not rsrc.local():
        raise util.Abort(_('%s is not a local Mercurial repo') % src)

    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)
    if not rdst.local():
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    success = False
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dst_lock = rdst.lock()

        # Get a list of all changesets in the source. The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
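            # with no command-line patterns, fall back to a space-separated
            # list from the config file (presumably a "[largefiles]" section
            # entry such as "patterns = *.zip *.iso" -- illustrative values)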
            if not pats:
                pats = ui.config(lfutil.longname, 'patterns', default=())
                if pats:
                    pats = pats.split(' ')
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)
        dst_lock.release()

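# _addchangeset() handles the --to-normal direction: standins read from the
# source repository are replaced by the real largefile contents looked up in
# the local store (see getfilectx below).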
def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
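    # (for merge changesets ctx.files() alone is not enough: files that exist
    # only in a parent, or whose content differs from either parent, are added
    # so the memctx below records them as removed or changed)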
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]
            data = fctx.data()
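            # .hgtags lines have the form "<hex node> <tag name>"; the tagged
            # nodes must be remapped through revmap because the converted
            # repository has new changeset hashes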
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the lfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('Renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                if 'l' in ctx.filectx(f).flags():
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                fullpath = rdst.wjoin(f)
                lfutil.createdir(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

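            # the standin committed to the destination contains only the
            # largefile's SHA-1 hash plus a trailing newline (written by
            # lfutil.writestandin() above), hence the lfiletohash lookup here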
            return context.memfilectx(f, lfiletohash[srcfname] + '\n', 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''
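    # 'size' is expressed in megabytes (the --size option below), so e.g. with
    # size=10 a file of 10 * 1024 * 1024 bytes or more qualifies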
    # never store special .hg* files as largefiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False

def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    files = filter(lambda h: not store.exists(h), files)
    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)

def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
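    # the expected hash of each largefile is read from its standin in the
    # requested revision; files already present with a matching hash, or
    # findable in a local store, are not fetched again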
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        if ((not os.path.exists(repo.wjoin(lfile)) or
             expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
            not lfutil.findfile(repo, expectedhash)):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])

def updatelfiles(ui, repo, filelist=None, printmessage=True):
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
        cachelfiles(ui, repo, '.')

        updated, removed = 0, 0
        for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
            # increment the appropriate counter according to _updatelfile's
            # return value
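            # ("i > 0 and i or 0" is the old and/or spelling of
            # "i if i > 0 else 0"; a None return adds nothing to either total)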
            updated += i > 0 and i or 0
            removed -= i < 0 and i or 0
            if printmessage and (removed or updated) and not printed:
                ui.status(_('getting changed largefiles\n'))
                printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()

def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        if os.path.exists(absstandin+'.orig'):
            shutil.copyfile(abslfile, abslfile+'.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if (expecthash != '' and
            (not os.path.exists(abslfile) or
             expecthash != lfutil.hashfile(abslfile))):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                return None # don't try to set the mode or update the dirstate
            ret = 1
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        if os.path.exists(abslfile):
            os.unlink(abslfile)
            ret = -1
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
    return ret

# -- hg commands declarations ------------------------------------------------

cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', '',
                    _('minimum size (MB) for files to be converted '
                      'as largefiles'),
                    'SIZE'),
                   ('', 'to-normal', False,
                    _('convert from a largefiles repo to a normal repo')),
                   ],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }