comparison hg_delta_editor.py @ 316:c3c647aff97c
Merge with danchr's changes.
author:   Augie Fackler <durin42@gmail.com>
date:     Sun, 03 May 2009 21:44:53 -0500
parents:  2257bfc01749 1d48d9a34c19
children: 5dc8fee7fc96
315:963d27a0b1c2 | 316:c3c647aff97c |
---|---|
14 from svn import delta | 14 from svn import delta |
15 from svn import core | 15 from svn import core |
16 | 16 |
17 import svnexternals | 17 import svnexternals |
18 import util | 18 import util |
19 from maps import * | |
19 | 20 |
20 def pickle_atomic(data, file_path, dir=None): | 21 def pickle_atomic(data, file_path, dir=None): |
21 """pickle some data to a path atomically. | 22 """pickle some data to a path atomically. |
22 | 23 |
23 This is present because I kept corrupting my revmap by managing to hit ^C | 24 This is present because I kept corrupting my revmap by managing to hit ^C |
117 f.close() | 118 f.close() |
118 else: | 119 else: |
119 self.tag_locations = tag_locations | 120 self.tag_locations = tag_locations |
120 pickle_atomic(self.tag_locations, self.tag_locations_file, | 121 pickle_atomic(self.tag_locations, self.tag_locations_file, |
121 self.meta_data_dir) | 122 self.meta_data_dir) |
123 # ensure nested paths are handled properly | |
124 self.tag_locations.sort() | |
125 self.tag_locations.reverse() | |
122 | 126 |
123 self.clear_current_info() | 127 self.clear_current_info() |
124 self.author_host = author_host | 128 self.authors = AuthorMap(self.ui, self.authors_file, |
125 self.authors = {} | 129 defaulthost=author_host) |
126 if os.path.exists(self.authors_file): | 130 if authors: self.authors.load(authors) |
127 self.readauthors(self.authors_file) | |
128 if authors and os.path.exists(authors): | |
129 self.readauthors(authors) | |
130 if self.authors: | |
131 self.writeauthors() | |
132 | 131 |
133 self.lastdate = '1970-01-01 00:00:00 -0000' | 132 self.lastdate = '1970-01-01 00:00:00 -0000' |
134 self.includepaths = {} | 133 self.includepaths = {} |
135 self.excludepaths = {} | 134 self.excludepaths = {} |
136 if filemap and os.path.exists(filemap): | 135 if filemap and os.path.exists(filemap): |
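Author handling in this hunk moves from ad hoc dictionary management to the `AuthorMap` class pulled in by the new `from maps import *` above. The real implementation lives in maps.py and is not part of this diff; the sketch below only illustrates the interface the constructor call, `load()`, and the later `self.authors[rev.author]` lookups appear to assume (a dict-style mapping with a `defaulthost` fallback), so all names and details here are assumptions.

```python
import os

# Minimal sketch of the AuthorMap interface this changeset relies on.
# The real class lives in maps.py (not shown in this diff); this is
# illustrative only, not the actual implementation.
class AuthorMap(dict):
    def __init__(self, ui, path, defaulthost=''):
        super(AuthorMap, self).__init__()
        self.ui = ui
        self.path = path                  # persistent author map on disk
        self.defaulthost = defaulthost
        if os.path.exists(path):
            self.load(path)

    def load(self, path):
        # one 'svnauthor = Hg Author <hg@host>' mapping per line
        for line in open(path):
            if not line.strip():
                continue
            src, sep, dst = line.partition('=')
            if sep:
                self[src.strip()] = dst.strip()

    def __getitem__(self, author):
        # unmapped Subversion usernames fall back to author + default host,
        # mirroring the removed authorforsvnauthor() helper further down
        if author in self:
            return super(AuthorMap, self).__getitem__(author)
        return '%s%s' % (author, self.defaulthost)
```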
254 return branches | 253 return branches |
255 | 254 |
256 def _path_and_branch_for_path(self, path, existing=True): | 255 def _path_and_branch_for_path(self, path, existing=True): |
257 return self._split_branch_path(path, existing=existing)[:2] | 256 return self._split_branch_path(path, existing=existing)[:2] |
258 | 257 |
258 def _branch_for_path(self, path, existing=True): | |
259 return self._path_and_branch_for_path(path, existing=existing)[1] | |
260 | |
259 def _localname(self, path): | 261 def _localname(self, path): |
260 """Compute the local name for a branch located at path. | 262 """Compute the local name for a branch located at path. |
261 """ | 263 """ |
262 assert not path.startswith('tags/') | 264 assert not path.startswith('tags/') |
263 if path == 'trunk': | 265 if path == 'trunk': |
391 Note that it's only a tag if it was copied from the path '' in a branch (or tag) | 393 Note that it's only a tag if it was copied from the path '' in a branch (or tag) |
392 we have, for our purposes. | 394 we have, for our purposes. |
393 | 395 |
394 Otherwise, returns False. | 396 Otherwise, returns False. |
395 """ | 397 """ |
398 return self._split_tag_path(path)[1] or False | |
399 | |
400 def _split_tag_path(self, path): | |
401 """Figure out which tag inside our repo this path represents, and | |
402 also figure out which path inside that tag it is. | |
403 | |
404 Returns a tuple of (path within tag, tag name, server-side tag | |
405 path). | |
406 """ | |
396 path = self._normalize_path(path) | 407 path = self._normalize_path(path) |
397 for tags_path in self.tag_locations: | 408 for tags_path in self.tag_locations: |
398 if path and (path.startswith(tags_path) and | 409 if path and (path.startswith(tags_path) and |
399 len(path) > len('%s/' % tags_path)): | 410 len(path) > len('%s/' % tags_path)): |
400 return path[len(tags_path)+1:] | 411 tag, _, subpath = path[len(tags_path)+1:].partition('/') |
401 return False | 412 return (subpath, tag, '%s/%s' % (tags_path, tag)) |
413 return (None, None, None) | |
402 | 414 |
403 def get_parent_svn_branch_and_rev(self, number, branch): | 415 def get_parent_svn_branch_and_rev(self, number, branch): |
404 number -= 1 | 416 number -= 1 |
405 if (number, branch) in self.revmap: | 417 if (number, branch) in self.revmap: |
406 return number, branch | 418 return number, branch |
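Tag lookups now go through the new `_split_tag_path`, which returns a `(path within tag, tag name, server-side tag path)` triple instead of a bare string, and the constructor change above reverse-sorts `tag_locations` so nested locations are tried before their parents in the prefix loop. A small standalone illustration with hypothetical tag locations:

```python
# Hypothetical nested tag locations; reverse sorting puts the more
# specific 'tags/versions' ahead of 'tags' in the prefix scan.
tag_locations = ['tags', 'tags/versions']
tag_locations.sort()
tag_locations.reverse()          # -> ['tags/versions', 'tags']

path = 'tags/versions/1.0/src/main.c'
for tags_path in tag_locations:
    if path.startswith(tags_path) and len(path) > len('%s/' % tags_path):
        tag, _, subpath = path[len(tags_path) + 1:].partition('/')
        print (subpath, tag, '%s/%s' % (tags_path, tag))
        # -> ('src/main.c', '1.0', 'tags/versions/1.0')
        break
```

Without the reversal, `tags` would match first and the tag would be misreported as `versions`.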
624 current_ctx = context.memctx(self.repo, | 636 current_ctx = context.memctx(self.repo, |
625 parents, | 637 parents, |
626 rev.message or ' ', | 638 rev.message or ' ', |
627 files, | 639 files, |
628 del_all_files, | 640 del_all_files, |
629 self.authorforsvnauthor(rev.author), | 641 self.authors[rev.author], |
630 date, | 642 date, |
631 {'branch': 'closed-branches'}) | 643 {'branch': 'closed-branches'}) |
632 new_hash = self.repo.commitctx(current_ctx) | 644 new_hash = self.repo.commitctx(current_ctx) |
633 self.ui.status('Marked branch %s as closed.\n' % (branch or | 645 self.ui.status('Marked branch %s as closed.\n' % (branch or |
634 'default')) | 646 'default')) |
677 current_ctx = context.memctx(self.repo, | 689 current_ctx = context.memctx(self.repo, |
678 parents, | 690 parents, |
679 rev.message or '...', | 691 rev.message or '...', |
680 files.keys(), | 692 files.keys(), |
681 filectxfn, | 693 filectxfn, |
682 self.authorforsvnauthor(rev.author), | 694 self.authors[rev.author], |
683 date, | 695 date, |
684 extra) | 696 extra) |
685 new_hash = self.repo.commitctx(current_ctx) | 697 new_hash = self.repo.commitctx(current_ctx) |
686 util.describe_commit(self.ui, new_hash, branch) | 698 util.describe_commit(self.ui, new_hash, branch) |
687 if (rev.revnum, branch) not in self.revmap: | 699 if (rev.revnum, branch) not in self.revmap: |
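The commit paths in this file build in-memory changesets with `context.memctx`, passing the list of touched files plus a `filectxfn` callback that supplies each file's contents on demand. The schematic callback below assumes a hypothetical `current_files` store and uses the `memfilectx` constructor as it existed in Mercurial releases of this era; treat it as a sketch, not the extension's actual callback.

```python
from mercurial import context

def filectxfn(repo, memctx, path):
    # Return the content, flags and copy source for one file of the
    # in-memory commit; current_files is a hypothetical path -> state map.
    data, islink, isexec, copied = current_files[path]
    return context.memfilectx(path, data, islink, isexec, copied)
```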
704 current_ctx = context.memctx(self.repo, | 716 current_ctx = context.memctx(self.repo, |
705 (ha, node.nullid), | 717 (ha, node.nullid), |
706 rev.message or ' ', | 718 rev.message or ' ', |
707 [], | 719 [], |
708 del_all_files, | 720 del_all_files, |
709 self.authorforsvnauthor(rev.author), | 721 self.authors[rev.author], |
710 date, | 722 date, |
711 extra) | 723 extra) |
712 new_hash = self.repo.commitctx(current_ctx) | 724 new_hash = self.repo.commitctx(current_ctx) |
713 util.describe_commit(self.ui, new_hash, branch) | 725 util.describe_commit(self.ui, new_hash, branch) |
714 if (rev.revnum, branch) not in self.revmap: | 726 if (rev.revnum, branch) not in self.revmap: |
715 self.add_to_revmap(rev.revnum, branch, new_hash) | 727 self.add_to_revmap(rev.revnum, branch, new_hash) |
716 self._save_metadata() | 728 self._save_metadata() |
717 self.clear_current_info() | 729 self.clear_current_info() |
718 | |
719 def authorforsvnauthor(self, author): | |
720 if author in self.authors: | |
721 return self.authors[author] | |
722 return '%s%s' % (author, self.author_host) | |
723 | |
724 def svnauthorforauthor(self, author): | |
725 for svnauthor, hgauthor in self.authors.iteritems(): | |
726 if author == hgauthor: | |
727 return svnauthor | |
728 else: | |
729 # return the original svn-side author | |
730 return author.rsplit('@', 1)[0] | |
731 | |
732 def readauthors(self, authorfile): | |
733 self.ui.note(('Reading authormap from %s\n') % authorfile) | |
734 f = open(authorfile, 'r') | |
735 for line in f: | |
736 if not line.strip(): | |
737 continue | |
738 try: | |
739 srcauth, dstauth = line.split('=', 1) | |
740 srcauth = srcauth.strip() | |
741 dstauth = dstauth.strip() | |
742 if srcauth in self.authors and dstauth != self.authors[srcauth]: | |
743 self.ui.status(('Overriding author mapping for "%s" ' + | |
744 'from "%s" to "%s"\n') | |
745 % (srcauth, self.authors[srcauth], dstauth)) | |
746 else: | |
747 self.ui.debug(('Mapping author "%s" to "%s"\n') | |
748 % (srcauth, dstauth)) | |
749 self.authors[srcauth] = dstauth | |
750 except IndexError: | |
751 self.ui.warn( | |
752 ('Ignoring bad line in author map file %s: %s\n') | |
753 % (authorfile, line.rstrip())) | |
754 f.close() | |
755 | |
756 def writeauthors(self): | |
757 self.ui.debug(('Writing author map to %s\n') % self.authors_file) | |
758 f = open(self.authors_file, 'w+') | |
759 for author in self.authors: | |
760 f.write("%s=%s\n" % (author, self.authors[author])) | |
761 f.close() | |
762 | 730 |
763 def readfilemap(self, filemapfile): | 731 def readfilemap(self, filemapfile): |
764 self.ui.note( | 732 self.ui.note( |
765 ('Reading file map from %s\n') | 733 ('Reading file map from %s\n') |
766 % filemapfile) | 734 % filemapfile) |
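The removed `readauthors`/`writeauthors` pair (and presumably its `AuthorMap` replacement) works on a plain-text author map with one `svnuser = Hg Author` pair per line, split on the first `=` with surrounding whitespace stripped, much like the authormap accepted by `hg convert`. A hypothetical example file:

```
alice = Alice Example <alice@example.com>
bob = Bob Example <bob@example.com>
```

Subversion usernames not listed in the file are committed as the username plus the configured `author_host` suffix, the fallback the old `authorforsvnauthor` helper provided.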
912 if f in files: | 880 if f in files: |
913 return False | 881 return False |
914 # parentctx is not an ancestor of childctx, files are unrelated | 882 # parentctx is not an ancestor of childctx, files are unrelated |
915 return False | 883 return False |
916 | 884 |
917 def add_file(self, path, parent_baton, copyfrom_path, | 885 def add_file(self, path, parent_baton=None, copyfrom_path=None, |
918 copyfrom_revision, file_pool=None): | 886 copyfrom_revision=None, file_pool=None): |
919 self.current_file = None | 887 self.current_file = None |
920 self.base_revision = None | 888 self.base_revision = None |
921 if path in self.deleted_files: | 889 if path in self.deleted_files: |
922 del self.deleted_files[path] | 890 del self.deleted_files[path] |
923 fpath, branch = self._path_and_branch_for_path(path, existing=False) | 891 fpath, branch = self._path_and_branch_for_path(path, existing=False) |
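The final hunk gives `add_file` defaults for every parameter except `path`, presumably so the method can also be invoked with just a path when no parent baton or copy information is available, while still matching the Subversion delta-editor calling convention. Illustratively, with a hypothetical `editor` instance:

```python
# Hypothetical editor instance; both call styles are accepted after this change.
editor.add_file('trunk/newfile.txt')
editor.add_file('trunk/copy.txt', parent_baton=None,
                copyfrom_path='trunk/newfile.txt',
                copyfrom_revision=41)
```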