comparison mercurial/localrepo.py @ 1402:9d2c2e6b32b5
i18n part2: use '_' for all strings that are part of the user interface
author    Benoit Boissinot <benoit.boissinot@ens-lyon.org>
date      Tue, 18 Oct 2005 18:38:39 -0700
parents   cf9a1233738a
children  c6e6ca96a033
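For readers new to the idiom: the '_' applied throughout this changeset is the conventional gettext marker. Each user-visible string is wrapped in '_(...)' so it can be looked up in a translation catalog at runtime, and the '%' format arguments are applied to the translated template rather than to the original literal. A minimal sketch of how such a helper can be wired up is shown below; the "hg" domain name and the fallback behaviour are illustrative assumptions, not necessarily how Mercurial's own i18n module defines it.

    import gettext

    # Load the "hg" message catalog for the current locale; fall back to an
    # identity translation when no catalog is installed (the domain name and
    # lookup path are assumptions for this sketch).
    _ = gettext.translation("hg", fallback=True).gettext

    # Usage mirrors the pattern in the diff: translate the template string
    # first, then apply the format arguments to the translated result.
    def warn_missing(path):
        return _("%s does not exist!\n") % path
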
1401:fbf2b10011aa | 1402:9d2c2e6b32b5
17 if not path: | 17 if not path: |
18 p = os.getcwd() | 18 p = os.getcwd() |
19 while not os.path.isdir(os.path.join(p, ".hg")): | 19 while not os.path.isdir(os.path.join(p, ".hg")): |
20 oldp = p | 20 oldp = p |
21 p = os.path.dirname(p) | 21 p = os.path.dirname(p) |
22 if p == oldp: raise repo.RepoError("no repo found") | 22 if p == oldp: raise repo.RepoError(_("no repo found")) |
23 path = p | 23 path = p |
24 self.path = os.path.join(path, ".hg") | 24 self.path = os.path.join(path, ".hg") |
25 | 25 |
26 if not create and not os.path.isdir(self.path): | 26 if not create and not os.path.isdir(self.path): |
27 raise repo.RepoError("repository %s not found" % self.path) | 27 raise repo.RepoError(_("repository %s not found") % self.path) |
28 | 28 |
29 self.root = os.path.abspath(path) | 29 self.root = os.path.abspath(path) |
30 self.ui = ui | 30 self.ui = ui |
31 self.opener = util.opener(self.path) | 31 self.opener = util.opener(self.path) |
32 self.wopener = util.opener(self.root) | 32 self.wopener = util.opener(self.root) |
47 except IOError: pass | 47 except IOError: pass |
48 | 48 |
49 def hook(self, name, **args): | 49 def hook(self, name, **args): |
50 s = self.ui.config("hooks", name) | 50 s = self.ui.config("hooks", name) |
51 if s: | 51 if s: |
52 self.ui.note("running hook %s: %s\n" % (name, s)) | 52 self.ui.note(_("running hook %s: %s\n") % (name, s)) |
53 old = {} | 53 old = {} |
54 for k, v in args.items(): | 54 for k, v in args.items(): |
55 k = k.upper() | 55 k = k.upper() |
56 old[k] = os.environ.get(k, None) | 56 old[k] = os.environ.get(k, None) |
57 os.environ[k] = v | 57 os.environ[k] = v |
67 os.environ[k] = v | 67 os.environ[k] = v |
68 else: | 68 else: |
69 del os.environ[k] | 69 del os.environ[k] |
70 | 70 |
71 if r: | 71 if r: |
72 self.ui.warn("abort: %s hook failed with status %d!\n" % | 72 self.ui.warn(_("abort: %s hook failed with status %d!\n") % |
73 (name, r)) | 73 (name, r)) |
74 return False | 74 return False |
75 return True | 75 return True |
76 | 76 |
77 def tags(self): | 77 def tags(self): |
137 return self.tags()[key] | 137 return self.tags()[key] |
138 except KeyError: | 138 except KeyError: |
139 try: | 139 try: |
140 return self.changelog.lookup(key) | 140 return self.changelog.lookup(key) |
141 except: | 141 except: |
142 raise repo.RepoError("unknown revision '%s'" % key) | 142 raise repo.RepoError(_("unknown revision '%s'") % key) |
143 | 143 |
144 def dev(self): | 144 def dev(self): |
145 return os.stat(self.path).st_dev | 145 return os.stat(self.path).st_dev |
146 | 146 |
147 def local(self): | 147 def local(self): |
173 | 173 |
174 data = self.wopener(filename, 'r').read() | 174 data = self.wopener(filename, 'r').read() |
175 | 175 |
176 for mf, cmd in self.encodepats: | 176 for mf, cmd in self.encodepats: |
177 if mf(filename): | 177 if mf(filename): |
178 self.ui.debug("filtering %s through %s\n" % (filename, cmd)) | 178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd)) |
179 data = util.filter(data, cmd) | 179 data = util.filter(data, cmd) |
180 break | 180 break |
181 | 181 |
182 return data | 182 return data |
183 | 183 |
189 l.append((mf, cmd)) | 189 l.append((mf, cmd)) |
190 self.decodepats = l | 190 self.decodepats = l |
191 | 191 |
192 for mf, cmd in self.decodepats: | 192 for mf, cmd in self.decodepats: |
193 if mf(filename): | 193 if mf(filename): |
194 self.ui.debug("filtering %s through %s\n" % (filename, cmd)) | 194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd)) |
195 data = util.filter(data, cmd) | 195 data = util.filter(data, cmd) |
196 break | 196 break |
197 | 197 |
198 if fd: | 198 if fd: |
199 return fd.write(data) | 199 return fd.write(data) |
216 self.join("journal"), after) | 216 self.join("journal"), after) |
217 | 217 |
218 def recover(self): | 218 def recover(self): |
219 lock = self.lock() | 219 lock = self.lock() |
220 if os.path.exists(self.join("journal")): | 220 if os.path.exists(self.join("journal")): |
221 self.ui.status("rolling back interrupted transaction\n") | 221 self.ui.status(_("rolling back interrupted transaction\n")) |
222 return transaction.rollback(self.opener, self.join("journal")) | 222 return transaction.rollback(self.opener, self.join("journal")) |
223 else: | 223 else: |
224 self.ui.warn("no interrupted transaction available\n") | 224 self.ui.warn(_("no interrupted transaction available\n")) |
225 | 225 |
226 def undo(self): | 226 def undo(self): |
227 lock = self.lock() | 227 lock = self.lock() |
228 if os.path.exists(self.join("undo")): | 228 if os.path.exists(self.join("undo")): |
229 self.ui.status("rolling back last transaction\n") | 229 self.ui.status(_("rolling back last transaction\n")) |
230 transaction.rollback(self.opener, self.join("undo")) | 230 transaction.rollback(self.opener, self.join("undo")) |
231 self.dirstate = None | 231 self.dirstate = None |
232 util.rename(self.join("undo.dirstate"), self.join("dirstate")) | 232 util.rename(self.join("undo.dirstate"), self.join("dirstate")) |
233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root) | 233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root) |
234 else: | 234 else: |
235 self.ui.warn("no undo information available\n") | 235 self.ui.warn(_("no undo information available\n")) |
236 | 236 |
237 def lock(self, wait=1): | 237 def lock(self, wait=1): |
238 try: | 238 try: |
239 return lock.lock(self.join("lock"), 0) | 239 return lock.lock(self.join("lock"), 0) |
240 except lock.LockHeld, inst: | 240 except lock.LockHeld, inst: |
241 if wait: | 241 if wait: |
242 self.ui.warn("waiting for lock held by %s\n" % inst.args[0]) | 242 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0]) |
243 return lock.lock(self.join("lock"), wait) | 243 return lock.lock(self.join("lock"), wait) |
244 raise inst | 244 raise inst |
245 | 245 |
246 def rawcommit(self, files, text, user, date, p1=None, p2=None): | 246 def rawcommit(self, files, text, user, date, p1=None, p2=None): |
247 orig_parent = self.dirstate.parents()[0] or nullid | 247 orig_parent = self.dirstate.parents()[0] or nullid |
325 if s in 'nmai': | 325 if s in 'nmai': |
326 commit.append(f) | 326 commit.append(f) |
327 elif s == 'r': | 327 elif s == 'r': |
328 remove.append(f) | 328 remove.append(f) |
329 else: | 329 else: |
330 self.ui.warn("%s not tracked!\n" % f) | 330 self.ui.warn(_("%s not tracked!\n") % f) |
331 else: | 331 else: |
332 (c, a, d, u) = self.changes(match=match) | 332 (c, a, d, u) = self.changes(match=match) |
333 commit = c + a | 333 commit = c + a |
334 remove = d | 334 remove = d |
335 | 335 |
339 m1 = self.manifest.read(c1[0]) | 339 m1 = self.manifest.read(c1[0]) |
340 mf1 = self.manifest.readflags(c1[0]) | 340 mf1 = self.manifest.readflags(c1[0]) |
341 m2 = self.manifest.read(c2[0]) | 341 m2 = self.manifest.read(c2[0]) |
342 | 342 |
343 if not commit and not remove and not force and p2 == nullid: | 343 if not commit and not remove and not force and p2 == nullid: |
344 self.ui.status("nothing changed\n") | 344 self.ui.status(_("nothing changed\n")) |
345 return None | 345 return None |
346 | 346 |
347 if not self.hook("precommit"): | 347 if not self.hook("precommit"): |
348 return None | 348 return None |
349 | 349 |
358 self.ui.note(f + "\n") | 358 self.ui.note(f + "\n") |
359 try: | 359 try: |
360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False)) | 360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False)) |
361 t = self.wread(f) | 361 t = self.wread(f) |
362 except IOError: | 362 except IOError: |
363 self.ui.warn("trouble committing %s!\n" % f) | 363 self.ui.warn(_("trouble committing %s!\n") % f) |
364 raise | 364 raise |
365 | 365 |
366 r = self.file(f) | 366 r = self.file(f) |
367 | 367 |
368 meta = {} | 368 meta = {} |
369 cp = self.dirstate.copied(f) | 369 cp = self.dirstate.copied(f) |
370 if cp: | 370 if cp: |
371 meta["copy"] = cp | 371 meta["copy"] = cp |
372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid))) | 372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid))) |
373 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"])) | 373 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"])) |
374 fp1, fp2 = nullid, nullid | 374 fp1, fp2 = nullid, nullid |
375 else: | 375 else: |
376 fp1 = m1.get(f, nullid) | 376 fp1 = m1.get(f, nullid) |
377 fp2 = m2.get(f, nullid) | 377 fp2 = m2.get(f, nullid) |
378 | 378 |
519 | 519 |
520 def add(self, list): | 520 def add(self, list): |
521 for f in list: | 521 for f in list: |
522 p = self.wjoin(f) | 522 p = self.wjoin(f) |
523 if not os.path.exists(p): | 523 if not os.path.exists(p): |
524 self.ui.warn("%s does not exist!\n" % f) | 524 self.ui.warn(_("%s does not exist!\n") % f) |
525 elif not os.path.isfile(p): | 525 elif not os.path.isfile(p): |
526 self.ui.warn("%s not added: only files supported currently\n" % f) | 526 self.ui.warn(_("%s not added: only files supported currently\n") % f) |
527 elif self.dirstate.state(f) in 'an': | 527 elif self.dirstate.state(f) in 'an': |
528 self.ui.warn("%s already tracked!\n" % f) | 528 self.ui.warn(_("%s already tracked!\n") % f) |
529 else: | 529 else: |
530 self.dirstate.update([f], "a") | 530 self.dirstate.update([f], "a") |
531 | 531 |
532 def forget(self, list): | 532 def forget(self, list): |
533 for f in list: | 533 for f in list: |
534 if self.dirstate.state(f) not in 'ai': | 534 if self.dirstate.state(f) not in 'ai': |
535 self.ui.warn("%s not added!\n" % f) | 535 self.ui.warn(_("%s not added!\n") % f) |
536 else: | 536 else: |
537 self.dirstate.forget([f]) | 537 self.dirstate.forget([f]) |
538 | 538 |
539 def remove(self, list): | 539 def remove(self, list): |
540 for f in list: | 540 for f in list: |
541 p = self.wjoin(f) | 541 p = self.wjoin(f) |
542 if os.path.exists(p): | 542 if os.path.exists(p): |
543 self.ui.warn("%s still exists!\n" % f) | 543 self.ui.warn(_("%s still exists!\n") % f) |
544 elif self.dirstate.state(f) == 'a': | 544 elif self.dirstate.state(f) == 'a': |
545 self.ui.warn("%s never committed!\n" % f) | 545 self.ui.warn(_("%s never committed!\n") % f) |
546 self.dirstate.forget([f]) | 546 self.dirstate.forget([f]) |
547 elif f not in self.dirstate: | 547 elif f not in self.dirstate: |
548 self.ui.warn("%s not tracked!\n" % f) | 548 self.ui.warn(_("%s not tracked!\n") % f) |
549 else: | 549 else: |
550 self.dirstate.update([f], "r") | 550 self.dirstate.update([f], "r") |
551 | 551 |
552 def copy(self, source, dest): | 552 def copy(self, source, dest): |
553 p = self.wjoin(dest) | 553 p = self.wjoin(dest) |
554 if not os.path.exists(p): | 554 if not os.path.exists(p): |
555 self.ui.warn("%s does not exist!\n" % dest) | 555 self.ui.warn(_("%s does not exist!\n") % dest) |
556 elif not os.path.isfile(p): | 556 elif not os.path.isfile(p): |
557 self.ui.warn("copy failed: %s is not a file\n" % dest) | 557 self.ui.warn(_("copy failed: %s is not a file\n") % dest) |
558 else: | 558 else: |
559 if self.dirstate.state(dest) == '?': | 559 if self.dirstate.state(dest) == '?': |
560 self.dirstate.update([dest], "a") | 560 self.dirstate.update([dest], "a") |
561 self.dirstate.copy(source, dest) | 561 self.dirstate.copy(source, dest) |
562 | 562 |
736 if base == None: | 736 if base == None: |
737 base = {} | 737 base = {} |
738 | 738 |
739 # assume we're closer to the tip than the root | 739 # assume we're closer to the tip than the root |
740 # and start by examining the heads | 740 # and start by examining the heads |
741 self.ui.status("searching for changes\n") | 741 self.ui.status(_("searching for changes\n")) |
742 | 742 |
743 if not heads: | 743 if not heads: |
744 heads = remote.heads() | 744 heads = remote.heads() |
745 | 745 |
746 unknown = [] | 746 unknown = [] |
766 while unknown: | 766 while unknown: |
767 n = unknown.pop(0) | 767 n = unknown.pop(0) |
768 if n[0] in seen: | 768 if n[0] in seen: |
769 continue | 769 continue |
770 | 770 |
771 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1]))) | 771 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1]))) |
772 if n[0] == nullid: | 772 if n[0] == nullid: |
773 break | 773 break |
774 if n in seenbranch: | 774 if n in seenbranch: |
775 self.ui.debug("branch already found\n") | 775 self.ui.debug(_("branch already found\n")) |
776 continue | 776 continue |
777 if n[1] and n[1] in m: # do we know the base? | 777 if n[1] and n[1] in m: # do we know the base? |
778 self.ui.debug("found incomplete branch %s:%s\n" | 778 self.ui.debug(_("found incomplete branch %s:%s\n") |
779 % (short(n[0]), short(n[1]))) | 779 % (short(n[0]), short(n[1]))) |
780 search.append(n) # schedule branch range for scanning | 780 search.append(n) # schedule branch range for scanning |
781 seenbranch[n] = 1 | 781 seenbranch[n] = 1 |
782 else: | 782 else: |
783 if n[1] not in seen and n[1] not in fetch: | 783 if n[1] not in seen and n[1] not in fetch: |
784 if n[2] in m and n[3] in m: | 784 if n[2] in m and n[3] in m: |
785 self.ui.debug("found new changeset %s\n" % | 785 self.ui.debug(_("found new changeset %s\n") % |
786 short(n[1])) | 786 short(n[1])) |
787 fetch[n[1]] = 1 # earliest unknown | 787 fetch[n[1]] = 1 # earliest unknown |
788 base[n[2]] = 1 # latest known | 788 base[n[2]] = 1 # latest known |
789 continue | 789 continue |
790 | 790 |
795 | 795 |
796 seen[n[0]] = 1 | 796 seen[n[0]] = 1 |
797 | 797 |
798 if r: | 798 if r: |
799 reqcnt += 1 | 799 reqcnt += 1 |
800 self.ui.debug("request %d: %s\n" % | 800 self.ui.debug(_("request %d: %s\n") % |
801 (reqcnt, " ".join(map(short, r)))) | 801 (reqcnt, " ".join(map(short, r)))) |
802 for p in range(0, len(r), 10): | 802 for p in range(0, len(r), 10): |
803 for b in remote.branches(r[p:p+10]): | 803 for b in remote.branches(r[p:p+10]): |
804 self.ui.debug("received %s:%s\n" % | 804 self.ui.debug(_("received %s:%s\n") % |
805 (short(b[0]), short(b[1]))) | 805 (short(b[0]), short(b[1]))) |
806 if b[0] in m: | 806 if b[0] in m: |
807 self.ui.debug("found base node %s\n" % short(b[0])) | 807 self.ui.debug(_("found base node %s\n") % short(b[0])) |
808 base[b[0]] = 1 | 808 base[b[0]] = 1 |
809 elif b[0] not in seen: | 809 elif b[0] not in seen: |
810 unknown.append(b) | 810 unknown.append(b) |
811 | 811 |
812 # do binary search on the branches we found | 812 # do binary search on the branches we found |
816 l = remote.between([(n[0], n[1])])[0] | 816 l = remote.between([(n[0], n[1])])[0] |
817 l.append(n[1]) | 817 l.append(n[1]) |
818 p = n[0] | 818 p = n[0] |
819 f = 1 | 819 f = 1 |
820 for i in l: | 820 for i in l: |
821 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i))) | 821 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i))) |
822 if i in m: | 822 if i in m: |
823 if f <= 2: | 823 if f <= 2: |
824 self.ui.debug("found new branch changeset %s\n" % | 824 self.ui.debug(_("found new branch changeset %s\n") % |
825 short(p)) | 825 short(p)) |
826 fetch[p] = 1 | 826 fetch[p] = 1 |
827 base[i] = 1 | 827 base[i] = 1 |
828 else: | 828 else: |
829 self.ui.debug("narrowed branch search to %s:%s\n" | 829 self.ui.debug(_("narrowed branch search to %s:%s\n") |
830 % (short(p), short(i))) | 830 % (short(p), short(i))) |
831 search.append((p, i)) | 831 search.append((p, i)) |
832 break | 832 break |
833 p, f = i, f * 2 | 833 p, f = i, f * 2 |
834 | 834 |
835 # sanity check our fetch list | 835 # sanity check our fetch list |
836 for f in fetch.keys(): | 836 for f in fetch.keys(): |
837 if f in m: | 837 if f in m: |
838 raise repo.RepoError("already have changeset " + short(f[:4])) | 838 raise repo.RepoError(_("already have changeset ") + short(f[:4])) |
839 | 839 |
840 if base.keys() == [nullid]: | 840 if base.keys() == [nullid]: |
841 self.ui.warn("warning: pulling from an unrelated repository!\n") | 841 self.ui.warn(_("warning: pulling from an unrelated repository!\n")) |
842 | 842 |
843 self.ui.note("found new changesets starting at " + | 843 self.ui.note(_("found new changesets starting at ") + |
844 " ".join([short(f) for f in fetch]) + "\n") | 844 " ".join([short(f) for f in fetch]) + "\n") |
845 | 845 |
846 self.ui.debug("%d total queries\n" % reqcnt) | 846 self.ui.debug(_("%d total queries\n") % reqcnt) |
847 | 847 |
848 return fetch.keys() | 848 return fetch.keys() |
849 | 849 |
850 def findoutgoing(self, remote, base=None, heads=None): | 850 def findoutgoing(self, remote, base=None, heads=None): |
851 if base == None: | 851 if base == None: |
852 base = {} | 852 base = {} |
853 self.findincoming(remote, base, heads) | 853 self.findincoming(remote, base, heads) |
854 | 854 |
855 self.ui.debug("common changesets up to " | 855 self.ui.debug(_("common changesets up to ") |
856 + " ".join(map(short, base.keys())) + "\n") | 856 + " ".join(map(short, base.keys())) + "\n") |
857 | 857 |
858 remain = dict.fromkeys(self.changelog.nodemap) | 858 remain = dict.fromkeys(self.changelog.nodemap) |
859 | 859 |
860 # prune everything remote has from the tree | 860 # prune everything remote has from the tree |
880 def pull(self, remote): | 880 def pull(self, remote): |
881 lock = self.lock() | 881 lock = self.lock() |
882 | 882 |
883 # if we have an empty repo, fetch everything | 883 # if we have an empty repo, fetch everything |
884 if self.changelog.tip() == nullid: | 884 if self.changelog.tip() == nullid: |
885 self.ui.status("requesting all changes\n") | 885 self.ui.status(_("requesting all changes\n")) |
886 fetch = [nullid] | 886 fetch = [nullid] |
887 else: | 887 else: |
888 fetch = self.findincoming(remote) | 888 fetch = self.findincoming(remote) |
889 | 889 |
890 if not fetch: | 890 if not fetch: |
891 self.ui.status("no changes found\n") | 891 self.ui.status(_("no changes found\n")) |
892 return 1 | 892 return 1 |
893 | 893 |
894 cg = remote.changegroup(fetch) | 894 cg = remote.changegroup(fetch) |
895 return self.addchangegroup(cg) | 895 return self.addchangegroup(cg) |
896 | 896 |
899 | 899 |
900 base = {} | 900 base = {} |
901 heads = remote.heads() | 901 heads = remote.heads() |
902 inc = self.findincoming(remote, base, heads) | 902 inc = self.findincoming(remote, base, heads) |
903 if not force and inc: | 903 if not force and inc: |
904 self.ui.warn("abort: unsynced remote changes!\n") | 904 self.ui.warn(_("abort: unsynced remote changes!\n")) |
905 self.ui.status("(did you forget to sync? use push -f to force)\n") | 905 self.ui.status(_("(did you forget to sync? use push -f to force)\n")) |
906 return 1 | 906 return 1 |
907 | 907 |
908 update = self.findoutgoing(remote, base) | 908 update = self.findoutgoing(remote, base) |
909 if not update: | 909 if not update: |
910 self.ui.status("no changes found\n") | 910 self.ui.status(_("no changes found\n")) |
911 return 1 | 911 return 1 |
912 elif not force: | 912 elif not force: |
913 if len(heads) < len(self.changelog.heads()): | 913 if len(heads) < len(self.changelog.heads()): |
914 self.ui.warn("abort: push creates new remote branches!\n") | 914 self.ui.warn(_("abort: push creates new remote branches!\n")) |
915 self.ui.status("(did you forget to merge?" + | 915 self.ui.status(_("(did you forget to merge?" |
916 " use push -f to force)\n") | 916 " use push -f to force)\n")) |
917 return 1 | 917 return 1 |
918 | 918 |
919 cg = self.changegroup(update) | 919 cg = self.changegroup(update) |
920 return remote.addchangegroup(cg) | 920 return remote.addchangegroup(cg) |
921 | 921 |
961 if not d: return "" | 961 if not d: return "" |
962 l = struct.unpack(">l", d)[0] | 962 l = struct.unpack(">l", d)[0] |
963 if l <= 4: return "" | 963 if l <= 4: return "" |
964 d = source.read(l - 4) | 964 d = source.read(l - 4) |
965 if len(d) < l - 4: | 965 if len(d) < l - 4: |
966 raise repo.RepoError("premature EOF reading chunk" + | 966 raise repo.RepoError(_("premature EOF reading chunk" |
967 " (got %d bytes, expected %d)" | 967 " (got %d bytes, expected %d)") |
968 % (len(d), l - 4)) | 968 % (len(d), l - 4)) |
969 return d | 969 return d |
970 | 970 |
971 def getgroup(): | 971 def getgroup(): |
972 while 1: | 972 while 1: |
973 c = getchunk() | 973 c = getchunk() |
974 if not c: break | 974 if not c: break |
975 yield c | 975 yield c |
976 | 976 |
977 def csmap(x): | 977 def csmap(x): |
978 self.ui.debug("add changeset %s\n" % short(x)) | 978 self.ui.debug(_("add changeset %s\n") % short(x)) |
979 return self.changelog.count() | 979 return self.changelog.count() |
980 | 980 |
981 def revmap(x): | 981 def revmap(x): |
982 return self.changelog.rev(x) | 982 return self.changelog.rev(x) |
983 | 983 |
987 tr = self.transaction() | 987 tr = self.transaction() |
988 | 988 |
989 oldheads = len(self.changelog.heads()) | 989 oldheads = len(self.changelog.heads()) |
990 | 990 |
991 # pull off the changeset group | 991 # pull off the changeset group |
992 self.ui.status("adding changesets\n") | 992 self.ui.status(_("adding changesets\n")) |
993 co = self.changelog.tip() | 993 co = self.changelog.tip() |
994 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique | 994 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique |
995 cnr, cor = map(self.changelog.rev, (cn, co)) | 995 cnr, cor = map(self.changelog.rev, (cn, co)) |
996 if cn == nullid: | 996 if cn == nullid: |
997 cnr = cor | 997 cnr = cor |
998 changesets = cnr - cor | 998 changesets = cnr - cor |
999 | 999 |
1000 # pull off the manifest group | 1000 # pull off the manifest group |
1001 self.ui.status("adding manifests\n") | 1001 self.ui.status(_("adding manifests\n")) |
1002 mm = self.manifest.tip() | 1002 mm = self.manifest.tip() |
1003 mo = self.manifest.addgroup(getgroup(), revmap, tr) | 1003 mo = self.manifest.addgroup(getgroup(), revmap, tr) |
1004 | 1004 |
1005 # process the files | 1005 # process the files |
1006 self.ui.status("adding file changes\n") | 1006 self.ui.status(_("adding file changes\n")) |
1007 while 1: | 1007 while 1: |
1008 f = getchunk() | 1008 f = getchunk() |
1009 if not f: break | 1009 if not f: break |
1010 self.ui.debug("adding %s revisions\n" % f) | 1010 self.ui.debug(_("adding %s revisions\n") % f) |
1011 fl = self.file(f) | 1011 fl = self.file(f) |
1012 o = fl.count() | 1012 o = fl.count() |
1013 n = fl.addgroup(getgroup(), revmap, tr) | 1013 n = fl.addgroup(getgroup(), revmap, tr) |
1014 revisions += fl.count() - o | 1014 revisions += fl.count() - o |
1015 files += 1 | 1015 files += 1 |
1016 | 1016 |
1017 newheads = len(self.changelog.heads()) | 1017 newheads = len(self.changelog.heads()) |
1018 heads = "" | 1018 heads = "" |
1019 if oldheads and newheads > oldheads: | 1019 if oldheads and newheads > oldheads: |
1020 heads = " (+%d heads)" % (newheads - oldheads) | 1020 heads = _(" (+%d heads)") % (newheads - oldheads) |
1021 | 1021 |
1022 self.ui.status(("added %d changesets" + | 1022 self.ui.status(_("added %d changesets" |
1023 " with %d changes to %d files%s\n") | 1023 " with %d changes to %d files%s\n") |
1024 % (changesets, revisions, files, heads)) | 1024 % (changesets, revisions, files, heads)) |
1025 | 1025 |
1026 tr.close() | 1026 tr.close() |
1027 | 1027 |
1028 if changesets > 0: | 1028 if changesets > 0: |
1029 if not self.hook("changegroup", | 1029 if not self.hook("changegroup", |
1030 node=hex(self.changelog.node(cor+1))): | 1030 node=hex(self.changelog.node(cor+1))): |
1031 self.ui.warn("abort: changegroup hook returned failure!\n") | 1031 self.ui.warn(_("abort: changegroup hook returned failure!\n")) |
1032 return 1 | 1032 return 1 |
1033 | 1033 |
1034 for i in range(cor + 1, cnr + 1): | 1034 for i in range(cor + 1, cnr + 1): |
1035 self.hook("commit", node=hex(self.changelog.node(i))) | 1035 self.hook("commit", node=hex(self.changelog.node(i))) |
1036 | 1036 |
1038 | 1038 |
1039 def update(self, node, allow=False, force=False, choose=None, | 1039 def update(self, node, allow=False, force=False, choose=None, |
1040 moddirstate=True): | 1040 moddirstate=True): |
1041 pl = self.dirstate.parents() | 1041 pl = self.dirstate.parents() |
1042 if not force and pl[1] != nullid: | 1042 if not force and pl[1] != nullid: |
1043 self.ui.warn("aborting: outstanding uncommitted merges\n") | 1043 self.ui.warn(_("aborting: outstanding uncommitted merges\n")) |
1044 return 1 | 1044 return 1 |
1045 | 1045 |
1046 p1, p2 = pl[0], node | 1046 p1, p2 = pl[0], node |
1047 pa = self.changelog.ancestor(p1, p2) | 1047 pa = self.changelog.ancestor(p1, p2) |
1048 m1n = self.changelog.read(p1)[0] | 1048 m1n = self.changelog.read(p1)[0] |
1061 # from p1 to p2? | 1061 # from p1 to p2? |
1062 linear_path = (pa == p1 or pa == p2) | 1062 linear_path = (pa == p1 or pa == p2) |
1063 | 1063 |
1064 # resolve the manifest to determine which files | 1064 # resolve the manifest to determine which files |
1065 # we care about merging | 1065 # we care about merging |
1066 self.ui.note("resolving manifests\n") | 1066 self.ui.note(_("resolving manifests\n")) |
1067 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" % | 1067 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") % |
1068 (force, allow, moddirstate, linear_path)) | 1068 (force, allow, moddirstate, linear_path)) |
1069 self.ui.debug(" ancestor %s local %s remote %s\n" % | 1069 self.ui.debug(_(" ancestor %s local %s remote %s\n") % |
1070 (short(man), short(m1n), short(m2n))) | 1070 (short(man), short(m1n), short(m2n))) |
1071 | 1071 |
1072 merge = {} | 1072 merge = {} |
1073 get = {} | 1073 get = {} |
1074 remove = [] | 1074 remove = [] |
1110 # are files different? | 1110 # are files different? |
1111 if n != m2[f]: | 1111 if n != m2[f]: |
1112 a = ma.get(f, nullid) | 1112 a = ma.get(f, nullid) |
1113 # are both different from the ancestor? | 1113 # are both different from the ancestor? |
1114 if n != a and m2[f] != a: | 1114 if n != a and m2[f] != a: |
1115 self.ui.debug(" %s versions differ, resolve\n" % f) | 1115 self.ui.debug(_(" %s versions differ, resolve\n") % f) |
1116 # merge executable bits | 1116 # merge executable bits |
1117 # "if we changed or they changed, change in merge" | 1117 # "if we changed or they changed, change in merge" |
1118 a, b, c = mfa.get(f, 0), mfw[f], mf2[f] | 1118 a, b, c = mfa.get(f, 0), mfw[f], mf2[f] |
1119 mode = ((a^b) | (a^c)) ^ a | 1119 mode = ((a^b) | (a^c)) ^ a |
1120 merge[f] = (m1.get(f, nullid), m2[f], mode) | 1120 merge[f] = (m1.get(f, nullid), m2[f], mode) |
1121 s = 1 | 1121 s = 1 |
1122 # are we clobbering? | 1122 # are we clobbering? |
1123 # is remote's version newer? | 1123 # is remote's version newer? |
1124 # or are we going back in time? | 1124 # or are we going back in time? |
1125 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]): | 1125 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]): |
1126 self.ui.debug(" remote %s is newer, get\n" % f) | 1126 self.ui.debug(_(" remote %s is newer, get\n") % f) |
1127 get[f] = m2[f] | 1127 get[f] = m2[f] |
1128 s = 1 | 1128 s = 1 |
1129 elif f in umap: | 1129 elif f in umap: |
1130 # this unknown file is the same as the checkout | 1130 # this unknown file is the same as the checkout |
1131 get[f] = m2[f] | 1131 get[f] = m2[f] |
1132 | 1132 |
1133 if not s and mfw[f] != mf2[f]: | 1133 if not s and mfw[f] != mf2[f]: |
1134 if force: | 1134 if force: |
1135 self.ui.debug(" updating permissions for %s\n" % f) | 1135 self.ui.debug(_(" updating permissions for %s\n") % f) |
1136 util.set_exec(self.wjoin(f), mf2[f]) | 1136 util.set_exec(self.wjoin(f), mf2[f]) |
1137 else: | 1137 else: |
1138 a, b, c = mfa.get(f, 0), mfw[f], mf2[f] | 1138 a, b, c = mfa.get(f, 0), mfw[f], mf2[f] |
1139 mode = ((a^b) | (a^c)) ^ a | 1139 mode = ((a^b) | (a^c)) ^ a |
1140 if mode != b: | 1140 if mode != b: |
1141 self.ui.debug(" updating permissions for %s\n" % f) | 1141 self.ui.debug(_(" updating permissions for %s\n") % f) |
1142 util.set_exec(self.wjoin(f), mode) | 1142 util.set_exec(self.wjoin(f), mode) |
1143 del m2[f] | 1143 del m2[f] |
1144 elif f in ma: | 1144 elif f in ma: |
1145 if n != ma[f]: | 1145 if n != ma[f]: |
1146 r = "d" | 1146 r = _("d") |
1147 if not force and (linear_path or allow): | 1147 if not force and (linear_path or allow): |
1148 r = self.ui.prompt( | 1148 r = self.ui.prompt( |
1149 (" local changed %s which remote deleted\n" % f) + | 1149 (_(" local changed %s which remote deleted\n") % f) + |
1150 "(k)eep or (d)elete?", "[kd]", "k") | 1150 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) |
1151 if r == "d": | 1151 if r == _("d"): |
1152 remove.append(f) | 1152 remove.append(f) |
1153 else: | 1153 else: |
1154 self.ui.debug("other deleted %s\n" % f) | 1154 self.ui.debug(_("other deleted %s\n") % f) |
1155 remove.append(f) # other deleted it | 1155 remove.append(f) # other deleted it |
1156 else: | 1156 else: |
1157 # file is created on branch or in working directory | 1157 # file is created on branch or in working directory |
1158 if force and f not in umap: | 1158 if force and f not in umap: |
1159 self.ui.debug("remote deleted %s, clobbering\n" % f) | 1159 self.ui.debug(_("remote deleted %s, clobbering\n") % f) |
1160 remove.append(f) | 1160 remove.append(f) |
1161 elif n == m1.get(f, nullid): # same as parent | 1161 elif n == m1.get(f, nullid): # same as parent |
1162 if p2 == pa: # going backwards? | 1162 if p2 == pa: # going backwards? |
1163 self.ui.debug("remote deleted %s\n" % f) | 1163 self.ui.debug(_("remote deleted %s\n") % f) |
1164 remove.append(f) | 1164 remove.append(f) |
1165 else: | 1165 else: |
1166 self.ui.debug("local modified %s, keeping\n" % f) | 1166 self.ui.debug(_("local modified %s, keeping\n") % f) |
1167 else: | 1167 else: |
1168 self.ui.debug("working dir created %s, keeping\n" % f) | 1168 self.ui.debug(_("working dir created %s, keeping\n") % f) |
1169 | 1169 |
1170 for f, n in m2.iteritems(): | 1170 for f, n in m2.iteritems(): |
1171 if choose and not choose(f): continue | 1171 if choose and not choose(f): continue |
1172 if f[0] == "/": continue | 1172 if f[0] == "/": continue |
1173 if f in ma and n != ma[f]: | 1173 if f in ma and n != ma[f]: |
1174 r = "k" | 1174 r = _("k") |
1175 if not force and (linear_path or allow): | 1175 if not force and (linear_path or allow): |
1176 r = self.ui.prompt( | 1176 r = self.ui.prompt( |
1177 ("remote changed %s which local deleted\n" % f) + | 1177 (_("remote changed %s which local deleted\n") % f) + |
1178 "(k)eep or (d)elete?", "[kd]", "k") | 1178 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) |
1179 if r == "k": get[f] = n | 1179 if r == _("k"): get[f] = n |
1180 elif f not in ma: | 1180 elif f not in ma: |
1181 self.ui.debug("remote created %s\n" % f) | 1181 self.ui.debug(_("remote created %s\n") % f) |
1182 get[f] = n | 1182 get[f] = n |
1183 else: | 1183 else: |
1184 if force or p2 == pa: # going backwards? | 1184 if force or p2 == pa: # going backwards? |
1185 self.ui.debug("local deleted %s, recreating\n" % f) | 1185 self.ui.debug(_("local deleted %s, recreating\n") % f) |
1186 get[f] = n | 1186 get[f] = n |
1187 else: | 1187 else: |
1188 self.ui.debug("local deleted %s\n" % f) | 1188 self.ui.debug(_("local deleted %s\n") % f) |
1189 | 1189 |
1190 del mw, m1, m2, ma | 1190 del mw, m1, m2, ma |
1191 | 1191 |
1192 if force: | 1192 if force: |
1193 for f in merge: | 1193 for f in merge: |
1198 # we don't need to do any magic, just jump to the new rev | 1198 # we don't need to do any magic, just jump to the new rev |
1199 branch_merge = False | 1199 branch_merge = False |
1200 p1, p2 = p2, nullid | 1200 p1, p2 = p2, nullid |
1201 else: | 1201 else: |
1202 if not allow: | 1202 if not allow: |
1203 self.ui.status("this update spans a branch" + | 1203 self.ui.status(_("this update spans a branch" |
1204 " affecting the following files:\n") | 1204 " affecting the following files:\n")) |
1205 fl = merge.keys() + get.keys() | 1205 fl = merge.keys() + get.keys() |
1206 fl.sort() | 1206 fl.sort() |
1207 for f in fl: | 1207 for f in fl: |
1208 cf = "" | 1208 cf = "" |
1209 if f in merge: cf = " (resolve)" | 1209 if f in merge: cf = _(" (resolve)") |
1210 self.ui.status(" %s%s\n" % (f, cf)) | 1210 self.ui.status(" %s%s\n" % (f, cf)) |
1211 self.ui.warn("aborting update spanning branches!\n") | 1211 self.ui.warn(_("aborting update spanning branches!\n")) |
1212 self.ui.status("(use update -m to merge across branches" + | 1212 self.ui.status(_("(use update -m to merge across branches" |
1213 " or -C to lose changes)\n") | 1213 " or -C to lose changes)\n")) |
1214 return 1 | 1214 return 1 |
1215 branch_merge = True | 1215 branch_merge = True |
1216 | 1216 |
1217 if moddirstate: | 1217 if moddirstate: |
1218 self.dirstate.setparents(p1, p2) | 1218 self.dirstate.setparents(p1, p2) |
1220 # get the files we don't need to change | 1220 # get the files we don't need to change |
1221 files = get.keys() | 1221 files = get.keys() |
1222 files.sort() | 1222 files.sort() |
1223 for f in files: | 1223 for f in files: |
1224 if f[0] == "/": continue | 1224 if f[0] == "/": continue |
1225 self.ui.note("getting %s\n" % f) | 1225 self.ui.note(_("getting %s\n") % f) |
1226 t = self.file(f).read(get[f]) | 1226 t = self.file(f).read(get[f]) |
1227 try: | 1227 try: |
1228 self.wwrite(f, t) | 1228 self.wwrite(f, t) |
1229 except IOError, e: | 1229 except IOError, e: |
1230 if e.errno != errno.ENOENT: | 1230 if e.errno != errno.ENOENT: |
1240 | 1240 |
1241 # merge the tricky bits | 1241 # merge the tricky bits |
1242 files = merge.keys() | 1242 files = merge.keys() |
1243 files.sort() | 1243 files.sort() |
1244 for f in files: | 1244 for f in files: |
1245 self.ui.status("merging %s\n" % f) | 1245 self.ui.status(_("merging %s\n") % f) |
1246 my, other, flag = merge[f] | 1246 my, other, flag = merge[f] |
1247 self.merge3(f, my, other) | 1247 self.merge3(f, my, other) |
1248 util.set_exec(self.wjoin(f), flag) | 1248 util.set_exec(self.wjoin(f), flag) |
1249 if moddirstate: | 1249 if moddirstate: |
1250 if branch_merge: | 1250 if branch_merge: |
1260 f_len = len(self.file(f).read(other)) | 1260 f_len = len(self.file(f).read(other)) |
1261 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1) | 1261 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1) |
1262 | 1262 |
1263 remove.sort() | 1263 remove.sort() |
1264 for f in remove: | 1264 for f in remove: |
1265 self.ui.note("removing %s\n" % f) | 1265 self.ui.note(_("removing %s\n") % f) |
1266 try: | 1266 try: |
1267 os.unlink(self.wjoin(f)) | 1267 os.unlink(self.wjoin(f)) |
1268 except OSError, inst: | 1268 except OSError, inst: |
1269 if inst.errno != errno.ENOENT: | 1269 if inst.errno != errno.ENOENT: |
1270 self.ui.warn("update failed to remove %s: %s!\n" % | 1270 self.ui.warn(_("update failed to remove %s: %s!\n") % |
1271 (f, inst.strerror)) | 1271 (f, inst.strerror)) |
1272 # try removing directories that might now be empty | 1272 # try removing directories that might now be empty |
1273 try: os.removedirs(os.path.dirname(self.wjoin(f))) | 1273 try: os.removedirs(os.path.dirname(self.wjoin(f))) |
1274 except: pass | 1274 except: pass |
1275 if moddirstate: | 1275 if moddirstate: |
1293 base = fl.ancestor(my, other) | 1293 base = fl.ancestor(my, other) |
1294 a = self.wjoin(fn) | 1294 a = self.wjoin(fn) |
1295 b = temp("base", base) | 1295 b = temp("base", base) |
1296 c = temp("other", other) | 1296 c = temp("other", other) |
1297 | 1297 |
1298 self.ui.note("resolving %s\n" % fn) | 1298 self.ui.note(_("resolving %s\n") % fn) |
1299 self.ui.debug("file %s: my %s other %s ancestor %s\n" % | 1299 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") % |
1300 (fn, short(my), short(other), short(base))) | 1300 (fn, short(my), short(other), short(base))) |
1301 | 1301 |
1302 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge") | 1302 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge") |
1303 or "hgmerge") | 1303 or "hgmerge") |
1304 r = os.system("%s %s %s %s" % (cmd, a, b, c)) | 1304 r = os.system("%s %s %s %s" % (cmd, a, b, c)) |
1305 if r: | 1305 if r: |
1306 self.ui.warn("merging %s failed!\n" % fn) | 1306 self.ui.warn(_("merging %s failed!\n") % fn) |
1307 | 1307 |
1308 os.unlink(b) | 1308 os.unlink(b) |
1309 os.unlink(c) | 1309 os.unlink(c) |
1310 | 1310 |
1311 def verify(self): | 1311 def verify(self): |
1318 def err(msg): | 1318 def err(msg): |
1319 self.ui.warn(msg + "\n") | 1319 self.ui.warn(msg + "\n") |
1320 errors[0] += 1 | 1320 errors[0] += 1 |
1321 | 1321 |
1322 seen = {} | 1322 seen = {} |
1323 self.ui.status("checking changesets\n") | 1323 self.ui.status(_("checking changesets\n")) |
1324 for i in range(self.changelog.count()): | 1324 for i in range(self.changelog.count()): |
1325 changesets += 1 | 1325 changesets += 1 |
1326 n = self.changelog.node(i) | 1326 n = self.changelog.node(i) |
1327 l = self.changelog.linkrev(n) | 1327 l = self.changelog.linkrev(n) |
1328 if l != i: | 1328 if l != i: |
1329 err("incorrect link (%d) for changeset revision %d" % (l, i)) | 1329 err(_("incorrect link (%d) for changeset revision %d") %(l, i)) |
1330 if n in seen: | 1330 if n in seen: |
1331 err("duplicate changeset at revision %d" % i) | 1331 err(_("duplicate changeset at revision %d") % i) |
1332 seen[n] = 1 | 1332 seen[n] = 1 |
1333 | 1333 |
1334 for p in self.changelog.parents(n): | 1334 for p in self.changelog.parents(n): |
1335 if p not in self.changelog.nodemap: | 1335 if p not in self.changelog.nodemap: |
1336 err("changeset %s has unknown parent %s" % | 1336 err(_("changeset %s has unknown parent %s") % |
1337 (short(n), short(p))) | 1337 (short(n), short(p))) |
1338 try: | 1338 try: |
1339 changes = self.changelog.read(n) | 1339 changes = self.changelog.read(n) |
1340 except Exception, inst: | 1340 except Exception, inst: |
1341 err("unpacking changeset %s: %s" % (short(n), inst)) | 1341 err(_("unpacking changeset %s: %s") % (short(n), inst)) |
1342 | 1342 |
1343 neededmanifests[changes[0]] = n | 1343 neededmanifests[changes[0]] = n |
1344 | 1344 |
1345 for f in changes[3]: | 1345 for f in changes[3]: |
1346 filelinkrevs.setdefault(f, []).append(i) | 1346 filelinkrevs.setdefault(f, []).append(i) |
1347 | 1347 |
1348 seen = {} | 1348 seen = {} |
1349 self.ui.status("checking manifests\n") | 1349 self.ui.status(_("checking manifests\n")) |
1350 for i in range(self.manifest.count()): | 1350 for i in range(self.manifest.count()): |
1351 n = self.manifest.node(i) | 1351 n = self.manifest.node(i) |
1352 l = self.manifest.linkrev(n) | 1352 l = self.manifest.linkrev(n) |
1353 | 1353 |
1354 if l < 0 or l >= self.changelog.count(): | 1354 if l < 0 or l >= self.changelog.count(): |
1355 err("bad manifest link (%d) at revision %d" % (l, i)) | 1355 err(_("bad manifest link (%d) at revision %d") % (l, i)) |
1356 | 1356 |
1357 if n in neededmanifests: | 1357 if n in neededmanifests: |
1358 del neededmanifests[n] | 1358 del neededmanifests[n] |
1359 | 1359 |
1360 if n in seen: | 1360 if n in seen: |
1361 err("duplicate manifest at revision %d" % i) | 1361 err(_("duplicate manifest at revision %d") % i) |
1362 | 1362 |
1363 seen[n] = 1 | 1363 seen[n] = 1 |
1364 | 1364 |
1365 for p in self.manifest.parents(n): | 1365 for p in self.manifest.parents(n): |
1366 if p not in self.manifest.nodemap: | 1366 if p not in self.manifest.nodemap: |
1367 err("manifest %s has unknown parent %s" % | 1367 err(_("manifest %s has unknown parent %s") % |
1368 (short(n), short(p))) | 1368 (short(n), short(p))) |
1369 | 1369 |
1370 try: | 1370 try: |
1371 delta = mdiff.patchtext(self.manifest.delta(n)) | 1371 delta = mdiff.patchtext(self.manifest.delta(n)) |
1372 except KeyboardInterrupt: | 1372 except KeyboardInterrupt: |
1373 self.ui.warn("interrupted") | 1373 self.ui.warn(_("interrupted")) |
1374 raise | 1374 raise |
1375 except Exception, inst: | 1375 except Exception, inst: |
1376 err("unpacking manifest %s: %s" % (short(n), inst)) | 1376 err(_("unpacking manifest %s: %s") % (short(n), inst)) |
1377 | 1377 |
1378 ff = [ l.split('\0') for l in delta.splitlines() ] | 1378 ff = [ l.split('\0') for l in delta.splitlines() ] |
1379 for f, fn in ff: | 1379 for f, fn in ff: |
1380 filenodes.setdefault(f, {})[bin(fn[:40])] = 1 | 1380 filenodes.setdefault(f, {})[bin(fn[:40])] = 1 |
1381 | 1381 |
1382 self.ui.status("crosschecking files in changesets and manifests\n") | 1382 self.ui.status(_("crosschecking files in changesets and manifests\n")) |
1383 | 1383 |
1384 for m,c in neededmanifests.items(): | 1384 for m,c in neededmanifests.items(): |
1385 err("Changeset %s refers to unknown manifest %s" % | 1385 err(_("Changeset %s refers to unknown manifest %s") % |
1386 (short(m), short(c))) | 1386 (short(m), short(c))) |
1387 del neededmanifests | 1387 del neededmanifests |
1388 | 1388 |
1389 for f in filenodes: | 1389 for f in filenodes: |
1390 if f not in filelinkrevs: | 1390 if f not in filelinkrevs: |
1391 err("file %s in manifest but not in changesets" % f) | 1391 err(_("file %s in manifest but not in changesets") % f) |
1392 | 1392 |
1393 for f in filelinkrevs: | 1393 for f in filelinkrevs: |
1394 if f not in filenodes: | 1394 if f not in filenodes: |
1395 err("file %s in changeset but not in manifest" % f) | 1395 err(_("file %s in changeset but not in manifest") % f) |
1396 | 1396 |
1397 self.ui.status("checking files\n") | 1397 self.ui.status(_("checking files\n")) |
1398 ff = filenodes.keys() | 1398 ff = filenodes.keys() |
1399 ff.sort() | 1399 ff.sort() |
1400 for f in ff: | 1400 for f in ff: |
1401 if f == "/dev/null": continue | 1401 if f == "/dev/null": continue |
1402 files += 1 | 1402 files += 1 |
1406 for i in range(fl.count()): | 1406 for i in range(fl.count()): |
1407 revisions += 1 | 1407 revisions += 1 |
1408 n = fl.node(i) | 1408 n = fl.node(i) |
1409 | 1409 |
1410 if n in seen: | 1410 if n in seen: |
1411 err("%s: duplicate revision %d" % (f, i)) | 1411 err(_("%s: duplicate revision %d") % (f, i)) |
1412 if n not in filenodes[f]: | 1412 if n not in filenodes[f]: |
1413 err("%s: %d:%s not in manifests" % (f, i, short(n))) | 1413 err(_("%s: %d:%s not in manifests") % (f, i, short(n))) |
1414 else: | 1414 else: |
1415 del filenodes[f][n] | 1415 del filenodes[f][n] |
1416 | 1416 |
1417 flr = fl.linkrev(n) | 1417 flr = fl.linkrev(n) |
1418 if flr not in filelinkrevs[f]: | 1418 if flr not in filelinkrevs[f]: |
1419 err("%s:%s points to unexpected changeset %d" | 1419 err(_("%s:%s points to unexpected changeset %d") |
1420 % (f, short(n), flr)) | 1420 % (f, short(n), flr)) |
1421 else: | 1421 else: |
1422 filelinkrevs[f].remove(flr) | 1422 filelinkrevs[f].remove(flr) |
1423 | 1423 |
1424 # verify contents | 1424 # verify contents |
1425 try: | 1425 try: |
1426 t = fl.read(n) | 1426 t = fl.read(n) |
1427 except Exception, inst: | 1427 except Exception, inst: |
1428 err("unpacking file %s %s: %s" % (f, short(n), inst)) | 1428 err(_("unpacking file %s %s: %s") % (f, short(n), inst)) |
1429 | 1429 |
1430 # verify parents | 1430 # verify parents |
1431 (p1, p2) = fl.parents(n) | 1431 (p1, p2) = fl.parents(n) |
1432 if p1 not in nodes: | 1432 if p1 not in nodes: |
1433 err("file %s:%s unknown parent 1 %s" % | 1433 err(_("file %s:%s unknown parent 1 %s") % |
1434 (f, short(n), short(p1))) | 1434 (f, short(n), short(p1))) |
1435 if p2 not in nodes: | 1435 if p2 not in nodes: |
1436 err("file %s:%s unknown parent 2 %s" % | 1436 err(_("file %s:%s unknown parent 2 %s") % |
1437 (f, short(n), short(p1))) | 1437 (f, short(n), short(p1))) |
1438 nodes[n] = 1 | 1438 nodes[n] = 1 |
1439 | 1439 |
1440 # cross-check | 1440 # cross-check |
1441 for node in filenodes[f]: | 1441 for node in filenodes[f]: |
1442 err("node %s in manifests not in %s" % (hex(node), f)) | 1442 err(_("node %s in manifests not in %s") % (hex(node), f)) |
1443 | 1443 |
1444 self.ui.status("%d files, %d changesets, %d total revisions\n" % | 1444 self.ui.status(_("%d files, %d changesets, %d total revisions\n") % |
1445 (files, changesets, revisions)) | 1445 (files, changesets, revisions)) |
1446 | 1446 |
1447 if errors[0]: | 1447 if errors[0]: |
1448 self.ui.warn("%d integrity errors encountered!\n" % errors[0]) | 1448 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0]) |
1449 return 1 | 1449 return 1 |
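
One detail worth calling out from the hunks that translate the keep/delete prompts (around lines 1146-1151 and 1174-1179 of the new revision): the prompt text, the choice pattern and the response keys all pass through _() as well, so the answer is compared against its translated form. Below is a hedged sketch of that shape, with a made-up prompt() helper standing in for self.ui.prompt:

    import gettext

    _ = gettext.translation("hg", fallback=True).gettext  # as in the sketch above

    # Hypothetical stand-in for self.ui.prompt; it simply returns the default
    # so the example stays non-interactive.  Only the call shape matters here.
    def prompt(msg, pattern, default):
        return default

    def delete_locally_changed(f):
        # Mirrors the changeset: the message, the choice pattern and the
        # response keys all go through _(), so a localized "k"/"d" pair still
        # matches what the user is asked to type.
        r = prompt(_(" local changed %s which remote deleted\n") % f
                   + _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
        return r == _("d")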