# HG changeset patch
# User Augie Fackler
# Date 1249736484 18000
# Node ID c2d0e738c8993e266e56a0590862c0a1796d2163
# Parent 3855865ba53d9ca75d908e6aa42a0d0fbed1305f
# Parent 37718f514acb3d1879897a701a376628b668eee5
Merge win32 fixes

diff --git a/README b/README
--- a/README
+++ b/README
@@ -14,8 +14,7 @@ internals of Mercurial and/or Subversion
 Installation
 ------------
 You need to have Subversion installed with the SWIG Python bindings
-from Subversion 1.5 or later. You need Mercurial 1.3 (currently in development)
-or later.
+from Subversion 1.5 or later. You need Mercurial 1.3 or later.
 
 .. _mercurial: http://selenic.com/repo/hg
 .. _mercurial-stable: http://selenic.com/repo/hg-stable
@@ -27,8 +26,7 @@ the UsingExtensions_ page in the Mercuri
 for specifying an absolute path near the bottom of the page. You want to
 give the path to the top level of your clone of this repository.
 
-.. _UsingExtensions: http://www.selenic.com/mercurial/wiki/index.cgi/
-    UsingExtensions
+.. _UsingExtensions: http://mercurial.selenic.com/wiki/UsingExtensions
 
 Before using hgsubversion, I *strongly* encourage you to run the
 automated tests. Just use nose_ if you have it (or ``easy_install
@@ -47,12 +45,10 @@ Get a new clone of an svn server::
 
 Real example::
 
-    $ hg clone svn+http://python-nose.googlecode.com/svn nose-hg
+    $ hg clone http://python-nose.googlecode.com/svn nose-hg
 
 Note, you should pull from the root subversion directory, not specific
-folders (such as trunk). Also, you only need to modify http:// urls as shown.
-This is a side effect of Mercurial and Subversion both claiming the http
-protocol, so svn+http is used to work around that.
+folders (such as trunk).
 
 Pull new revisions into an already-converted repo::
 
diff --git a/hgsubversion/__init__.py b/hgsubversion/__init__.py
--- a/hgsubversion/__init__.py
+++ b/hgsubversion/__init__.py
@@ -9,7 +9,7 @@ compatability can be offered. It is, how
 it works, and a good platform for further improvements.
 
 Before using hgsubversion, we *strongly* encourage running the
-automated tests. See `README' in the hgsubversion directory for
+automated tests. See 'README' in the hgsubversion directory for
 details.
 
 The operation of hgsubversion can be customised with the following variables:
@@ -28,115 +28,74 @@ from mercurial import commands
 from mercurial import extensions
 from mercurial import hg
 from mercurial import util as hgutil
-from mercurial import cmdutil as hgcmdutil
+from mercurial import demandimport
+demandimport.ignore.extend([
+    'svn',
+    'svn.client',
+    'svn.core',
+    'svn.delta',
+    'svn.ra',
+    ])
 
 from svn import core
 
 import svncommands
-import cmdutil
-import svnrepo
 import util
+import svnrepo
 import wrappers
-import svnexternals
-
-optionmap = {
-    'tagpaths': ('hgsubversion', 'tagpaths'),
-    'authors': ('hgsubversion', 'authormap'),
-    'filemap': ('hgsubversion', 'filemap'),
-    'stupid': ('hgsubversion', 'stupid'),
-    'defaulthost': ('hgsubversion', 'defaulthost'),
-    'defaultauthors': ('hgsubversion', 'defaultauthors'),
-    'usebranchnames': ('hgsubversion', 'usebranchnames'),
+
+svnopts = [
+    ('', 'stupid', None,
+     'use slower, but more compatible, protocol for Subversion'),
+]
+
+wrapcmds = { # cmd: generic, target, fixdoc, ppopts, opts
+    'parents': (False, None, False, False, [
+        ('', 'svn', None, 'show parent svn revision instead'),
+        ]),
+    'diff': (False, None, False, False, [
+        ('', 'svn', None, 'show svn diffs against svn parent'),
+        ]),
+    'pull': (True, 'sources', True, True, []),
+    'push': (True, 'destinations', True, True, []),
+    'incoming': (False, 'sources', True, True, []),
+    'clone': (False, 'sources', True, True, [
+        ('T', 'tagpaths', '',
+         'list of paths to search for tags in Subversion repositories'),
+        ('A', 'authors', '',
+         'file mapping Subversion usernames to Mercurial authors'),
+        ('', 'filemap', '',
+         'file containing rules for remapping Subversion repository paths'),
+        ]),
 }
 
-dontretain = { 'hgsubversion': set(['authormap', 'filemap']) }
-
-svnopts = (('', 'stupid', None, 'use slower, but more compatible, protocol for '
-            'Subversion'),)
-
-svncloneopts = (('T', 'tagpaths', '', 'list of path s to search for tags '
-                 'in Subversion repositories'),
-                ('A', 'authors', '', 'path to file mapping Subversion '
-                 'usernames to Mercurial authors'),
-                ('', 'filemap', '', 'path to file containing rules for '
-                 'remapping Subversion repository paths'),)
-
-def wrapper(orig, ui, repo, *args, **opts):
-    """
-    Subversion %(target)s can be used for %(command)s. See 'hg help
-    %(extension)s' for more on the conversion process.
-    """
-    for opt, (section, name) in optionmap.iteritems():
-        if opt in opts and opts[opt]:
-            if isinstance(repo, str):
-                ui.setconfig(section, name, opts.pop(opt))
-            else:
-                repo.ui.setconfig(section, name, opts.pop(opt))
-
-    return orig(ui, repo, *args, **opts)
-
-def clonewrapper(orig, ui, source, dest=None, **opts):
-    """
-    Some of the options listed below only apply to Subversion
-    %(target)s. See 'hg help %(extension)s' for more information on
-    them as well as other ways of customising the conversion process.
-    """
-
-    for opt, (section, name) in optionmap.iteritems():
-        if opt in opts and opts[opt]:
-            ui.setconfig(section, name, str(opts.pop(opt)))
-
-    # this must be kept in sync with mercurial/commands.py
-    srcrepo, dstrepo = hg.clone(hgcmdutil.remoteui(ui, opts), source, dest,
-                                pull=opts.get('pull'),
-                                stream=opts.get('uncompressed'),
-                                rev=opts.get('rev'),
-                                update=not opts.get('noupdate'))
-
-    if dstrepo.local() and srcrepo.capable('subversion'):
-        fd = dstrepo.opener("hgrc", "a", text=True)
-        for section in set(s for s, v in optionmap.itervalues()):
-            config = dict(ui.configitems(section))
-            for name in dontretain[section]:
-                config.pop(name, None)
-
-            if config:
-                fd.write('\n[%s]\n' % section)
-                map(fd.write, ('%s = %s\n' % p for p in config.iteritems()))
 
 def uisetup(ui):
-    """Do our UI setup.
-
-    Does the following wrappings:
-        * parent -> utility_commands.parent
-        * outgoing -> utility_commands.outgoing
-    """
-    entry = extensions.wrapcommand(commands.table, 'parents',
-                                   wrappers.parent)
-    entry[1].append(('', 'svn', None, "show parent svn revision instead"))
-    entry = extensions.wrapcommand(commands.table, 'diff',
-                                   wrappers.diff)
-    entry[1].append(('', 'svn', None,
-                     "show svn-style diffs, default against svn parent"))
-
-    for command, target, isclone in [('clone', 'sources', True),
-                                     ('pull', 'sources', False),
-                                     ('push', 'destinations', False)]:
-        doc = wrapper.__doc__.strip() % { 'command': command,
-                                          'Command': command.capitalize(),
-                                          'extension': 'hgsubversion',
-                                          'target': target }
-        fn = getattr(commands, command)
-        fn.__doc__ = fn.__doc__.rstrip() + '\n\n ' + doc
-        entry = extensions.wrapcommand(commands.table, command,
-                                       (wrapper, clonewrapper)[isclone])
-        entry[1].extend(svnopts)
-        if isclone: entry[1].extend(svncloneopts)
+    """insert command wrappers for a bunch of commands"""
+
+    docvals = {'extension': 'hgsubversion'}
+    for cmd, (generic, target, fixdoc, ppopts, opts) in wrapcmds.iteritems():
+
+        if fixdoc:
+            docvals['command'] = cmd
+            docvals['Command'] = cmd.capitalize()
+            docvals['target'] = target
+            doc = wrappers.generic.__doc__.strip() % docvals
+            fn = getattr(commands, cmd)
+            fn.__doc__ = fn.__doc__.rstrip() + '\n\n ' + doc
+
+        wrapped = generic and wrappers.generic or getattr(wrappers, cmd)
+        entry = extensions.wrapcommand(commands.table, cmd, wrapped)
+        if ppopts:
+            entry[1].extend(svnopts)
+        if opts:
+            entry[1].extend(opts)
 
     try:
         rebase = extensions.find('rebase')
-        if rebase:
-            entry = extensions.wrapcommand(rebase.cmdtable, 'rebase', wrappers.rebase)
-            entry[1].append(('', 'svn', None, 'automatic svn rebase', ))
+        if not rebase:
+            return
+        entry = extensions.wrapcommand(rebase.cmdtable, 'rebase', wrappers.rebase)
+        entry[1].append(('', 'svn', None, 'automatic svn rebase'))
     except:
         pass
@@ -153,6 +112,11 @@ def svn(ui, repo, subcommand, *args, **o
     if len(candidates) == 1:
         subcommand = candidates[0]
 
+    # override subversion credentials
+    for key in ('username', 'password'):
+        if key in opts:
+            ui.setconfig('hgsubversion', key, opts[key])
+
     path = os.path.dirname(repo.path)
     try:
         commandfunc = svncommands.table[subcommand]
@@ -181,14 +145,15 @@ def reposetup(ui, repo):
 
 def _lookup(url):
-    if cmdutil.islocalrepo(url):
+    if util.islocalrepo(url):
         return svnrepo
     else:
        return hg._local(url)
 
 # install scheme handlers
 hg.schemes.update({ 'file': _lookup, 'http': svnrepo, 'https': svnrepo,
-                    'svn': svnrepo, 'svn+ssh': svnrepo })
+                    'svn': svnrepo, 'svn+ssh': svnrepo, 'svn+http': svnrepo,
+                    'svn+https': svnrepo})
 
 
 cmdtable = {
     "svn":
diff --git
a/hgsubversion/hg_delta_editor.py b/hgsubversion/editor.py rename from hgsubversion/hg_delta_editor.py rename to hgsubversion/editor.py --- a/hgsubversion/hg_delta_editor.py +++ b/hgsubversion/editor.py @@ -1,49 +1,14 @@ import cStringIO -import cPickle as pickle -import os import sys -import tempfile -import traceback -from mercurial import context -from mercurial import hg -from mercurial import ui from mercurial import util as hgutil from mercurial import revlog from mercurial import node -from mercurial import error from svn import delta from svn import core -import svnexternals import util -import maps -class MissingPlainTextError(Exception): - """Exception raised when the repo lacks a source file required for replaying - a txdelta. - """ - -class ReplayException(Exception): - """Exception raised when you try and commit but the replay encountered an - exception. - """ - -def pickle_atomic(data, file_path, dir=None): - """pickle some data to a path atomically. - - This is present because I kept corrupting my revmap by managing to hit ^C - during the pickle of that file. - """ - try: - f, path = tempfile.mkstemp(prefix='pickling', dir=dir) - f = os.fdopen(f, 'w') - pickle.dump(data, f) - f.close() - except: #pragma: no cover - raise - else: - hgutil.rename(path, file_path) def ieditor(fn): """Helps identify methods used by the SVN editor interface. @@ -59,818 +24,110 @@ def ieditor(fn): try: return fn(self, *args, **kwargs) except: #pragma: no cover - if not hasattr(self, '_exception_info'): - self._exception_info = sys.exc_info() + if self.current.exception is not None: + self.current.exception = sys.exc_info() raise return fun -class HgChangeReceiver(delta.Editor): - def add_to_revmap(self, revnum, branch, node_hash): - f = open(self.revmap_file, 'a') - f.write(str(revnum) + ' ' + node.hex(node_hash) + ' ' + (branch or '') + '\n') - f.flush() - f.close() - self.revmap[revnum, branch] = node_hash - - def last_known_revision(self): - """Obtain the highest numbered -- i.e. latest -- revision known. - - Currently, this function just iterates over the entire revision map - using the max() builtin. This may be slow for extremely large - repositories, but for now, it's fast enough. - """ - try: - return max(k[0] for k in self.revmap.iterkeys()) - except ValueError: - return 0 - - def __init__(self, repo=None, path=None, ui_=None, - subdir='', author_host='', - tag_locations=[], - authors=None, filemap=None, uuid=None): - """path is the path to the target hg repo. +class RevisionData(object): - subdir is the subdirectory of the edits *on the svn server*. - It is needed for stripping paths off in certain cases. - """ - if repo and repo.ui and not ui_: - ui_ = repo.ui - if not ui_: - ui_ = ui.ui() - self.ui = ui_ - self.__setup_repo(uuid, repo, path, subdir) + __slots__ = [ + 'file', 'files', 'deleted', 'rev', 'execfiles', 'symlinks', 'batons', + 'copies', 'missing', 'emptybranches', 'base', 'externals', 'ui', + 'exception', + ] - if not author_host: - author_host = self.ui.config('hgsubversion', 'defaulthost', uuid) - if not authors: - authors = self.ui.config('hgsubversion', 'authormap') - if not filemap: - filemap = self.ui.config('hgsubversion', 'filemap') - if not tag_locations: - tag_locations = self.ui.configlist('hgsubversion', 'tagpaths', ['tags']) - self.usebranchnames = self.ui.configbool('hgsubversion', - 'usebranchnames', True) + def __init__(self, ui): + self.ui = ui + self.clear() - # FIXME: test that this hasn't changed! defer & compare? 
- self.subdir = subdir - if self.subdir and self.subdir[0] == '/': - self.subdir = self.subdir[1:] - self.branches = {} - if os.path.exists(self.branch_info_file): - f = open(self.branch_info_file) - self.branches = pickle.load(f) - f.close() - self.tags = {} - if os.path.exists(self.tag_locations_file): - f = open(self.tag_locations_file) - self.tag_locations = pickle.load(f) - f.close() - else: - self.tag_locations = tag_locations - pickle_atomic(self.tag_locations, self.tag_locations_file, - self.meta_data_dir) - # ensure nested paths are handled properly - self.tag_locations.sort() - self.tag_locations.reverse() - - self.clear_current_info() - self.authors = maps.AuthorMap(self.ui, self.authors_file, - defaulthost=author_host) - if authors: self.authors.load(authors) - - self.lastdate = '1970-01-01 00:00:00 -0000' - self.includepaths = {} - self.excludepaths = {} - if filemap and os.path.exists(filemap): - self.readfilemap(filemap) - - def fixdate(self, date): - if date is not None: - date = date.replace('T', ' ').replace('Z', '').split('.')[0] - date += ' -0000' - self.lastdate = date - else: - date = self.lastdate - return date - - def __setup_repo(self, uuid, repo, path, subdir): - """Verify the repo is going to work out for us. - - This method will fail an assertion if the repo exists but doesn't have - the Subversion metadata. - """ - if repo: - self.repo = repo - self.path = os.path.normpath(self.repo.join('..')) - elif path: - self.repo = hg.repository(self.ui, path, - create=(not os.path.exists(path))) - self.path = os.path.normpath(os.path.join(path, '..')) - else: #pragma: no cover - raise TypeError("editor requires either a path or a repository " - "specified") - - if not os.path.isdir(self.meta_data_dir): - os.makedirs(self.meta_data_dir) - self._set_uuid(uuid) - # TODO: validate subdir too - - if os.path.isfile(self.revmap_file): - self.revmap = util.parse_revmap(self.revmap_file) - else: - self.revmap = {} - f = open(self.revmap_file, 'w') - f.write('%s\n' % util.REVMAP_FILE_VERSION) - f.flush() - f.close() - - def clear_current_info(self): - '''Clear the info relevant to a replayed revision so that the next - revision can be replayed. - ''' - # Map files to raw svn data (symlink prefix is preserved) - self.current_files = {} - self.deleted_files = {} - self.current_rev = None - self.current_files_exec = {} - self.current_files_symlink = {} - self.dir_batons = {} + def clear(self): + self.file = None + self.files = {} + self.deleted = {} + self.rev = None + self.execfiles = {} + self.symlinks = {} + self.batons = {} # Map fully qualified destination file paths to module source path self.copies = {} - self.missing_plaintexts = set() - self.commit_branches_empty = {} - self.base_revision = None - self.branches_to_delete = set() + self.missing = set() + self.emptybranches = {} + self.base = None self.externals = {} + self.exception = None - def _save_metadata(self): - '''Save the Subversion metadata. This should really be called after - every revision is created. - ''' - pickle_atomic(self.branches, self.branch_info_file, self.meta_data_dir) - - def _path_and_branch_for_path(self, path, existing=True): - return self._split_branch_path(path, existing=existing)[:2] - - def _branch_for_path(self, path, existing=True): - return self._path_and_branch_for_path(path, existing=existing)[1] - - def _localname(self, path): - """Compute the local name for a branch located at path. 
- """ - assert not path.startswith('tags/') - if path == 'trunk': - return None - elif path.startswith('branches/'): - return path[len('branches/'):] - return '../%s' % path - - def _remotename(self, branch): - if branch == 'default' or branch is None: - return 'trunk' - elif branch.startswith('../'): - return branch[3:] - return 'branches/%s' % branch - - def _split_branch_path(self, path, existing=True): - """Figure out which branch inside our repo this path represents, and - also figure out which path inside that branch it is. - - Returns a tuple of (path within branch, local branch name, server-side branch path). - - If existing=True, will return None, None, None if the file isn't on some known - branch. If existing=False, then it will guess what the branch would be if it were - known. - """ - path = self._normalize_path(path) - if path.startswith('tags/'): - return None, None, None - test = '' - path_comps = path.split('/') - while self._localname(test) not in self.branches and len(path_comps): - if not test: - test = path_comps.pop(0) - else: - test += '/%s' % path_comps.pop(0) - if self._localname(test) in self.branches: - return path[len(test)+1:], self._localname(test), test - if existing: - return None, None, None - if path == 'trunk' or path.startswith('trunk/'): - path = path.split('/')[1:] - test = 'trunk' - elif path.startswith('branches/'): - elts = path.split('/') - test = '/'.join(elts[:2]) - path = '/'.join(elts[2:]) - else: - path = test.split('/')[-1] - test = '/'.join(test.split('/')[:-1]) - ln = self._localname(test) - if ln and ln.startswith('../'): - return None, None, None - return path, ln, test - - def set_current_rev(self, rev): - """Set the revision we're currently converting. - """ - self.current_rev = rev - - def set_file(self, path, data, isexec=False, islink=False): + def set(self, path, data, isexec=False, islink=False): if islink: data = 'link ' + data - self.current_files[path] = data - self.current_files_exec[path] = isexec - self.current_files_symlink[path] = islink - if path in self.deleted_files: - del self.deleted_files[path] - if path in self.missing_plaintexts: - self.missing_plaintexts.remove(path) - - def delete_file(self, path): - self.deleted_files[path] = True - if path in self.current_files: - del self.current_files[path] - self.current_files_exec[path] = False - self.current_files_symlink[path] = False + self.files[path] = data + self.execfiles[path] = isexec + self.symlinks[path] = islink + if path in self.deleted: + del self.deleted[path] + if path in self.missing: + self.missing.remove(path) + + def delete(self, path): + self.deleted[path] = True + if path in self.files: + del self.files[path] + self.execfiles[path] = False + self.symlinks[path] = False self.ui.note('D %s\n' % path) - def _normalize_path(self, path): - '''Normalize a path to strip of leading slashes and our subdir if we - have one. 
- ''' - if path and path[0] == '/': - path = path[1:] - if path and path.startswith(self.subdir): - path = path[len(self.subdir):] - if path and path[0] == '/': - path = path[1:] - return path - - def _is_file_included(self, subpath): - def checkpathinmap(path, mapping): - def rpairs(name): - yield '.', name - e = len(name) - while e != -1: - yield name[:e], name[e+1:] - e = name.rfind('/', 0, e) - - for pre, suf in rpairs(path): - try: - return mapping[pre] - except KeyError, err: - pass - return None - - if len(self.includepaths) and len(subpath): - inc = checkpathinmap(subpath, self.includepaths) - else: - inc = subpath - if len(self.excludepaths) and len(subpath): - exc = checkpathinmap(subpath, self.excludepaths) - else: - exc = None - if inc is None or exc is not None: - return False - return True - - def _is_path_valid(self, path): - if path is None: - return False - subpath = self._split_branch_path(path)[0] - if subpath is None: - return False - return self._is_file_included(subpath) - - def _is_path_tag(self, path): - """If path could represent the path to a tag, returns the potential tag - name. Otherwise, returns False. - - Note that it's only a tag if it was copied from the path '' in a branch - (or tag) we have, for our purposes. - """ - path = self._normalize_path(path) - for tagspath in self.tag_locations: - onpath = path.startswith(tagspath) - longer = len(path) > len('%s/' % tagspath) - if path and onpath and longer: - tag, subpath = path[len(tagspath) + 1:], '' - return tag - return False - - def get_parent_svn_branch_and_rev(self, number, branch): - number -= 1 - if (number, branch) in self.revmap: - return number, branch - real_num = 0 - for num, br in self.revmap.iterkeys(): - if br != branch: - continue - if num <= number and num > real_num: - real_num = num - if branch in self.branches: - parent_branch = self.branches[branch][0] - parent_branch_rev = self.branches[branch][1] - # check to see if this branch already existed and is the same - if parent_branch_rev < real_num: - return real_num, branch - # if that wasn't true, then this is the a new branch with the - # same name as some old deleted branch - if parent_branch_rev <= 0 and real_num == 0: - return None, None - branch_created_rev = self.branches[branch][2] - if parent_branch == 'trunk': - parent_branch = None - if branch_created_rev <= number+1 and branch != parent_branch: - return self.get_parent_svn_branch_and_rev( - parent_branch_rev+1, - parent_branch) - if real_num != 0: - return real_num, branch - return None, None - - def get_parent_revision(self, number, branch): - '''Get the parent revision hash for a commit on a specific branch. - ''' - r, br = self.get_parent_svn_branch_and_rev(number, branch) - if r is not None: - return self.revmap[r, br] - return revlog.nullid + def findmissing(self, svn): - def _svnpath(self, branch): - """Return the relative path in svn of branch. 
- """ - if branch == None or branch == 'default': - return 'trunk' - elif branch.startswith('../'): - return branch[3:] - return 'branches/%s' % branch - - def _determine_parent_branch(self, p, src_path, src_rev, revnum): - if src_path is not None: - src_file, src_branch = self._path_and_branch_for_path(src_path) - src_tag = self._is_path_tag(src_path) - if src_tag != False: - # also case 2 - src_branch, src_rev = self.tags[src_tag] - return {self._localname(p): (src_branch, src_rev, revnum )} - if src_file == '': - # case 2 - return {self._localname(p): (src_branch, src_rev, revnum )} - return {} - - def update_branch_tag_map_for_rev(self, revision): - paths = revision.paths - added_branches = {} - added_tags = {} - self.branches_to_delete = set() - tags_to_delete = set() - for p in sorted(paths): - t_name = self._is_path_tag(p) - if t_name != False: - src_p, src_rev = paths[p].copyfrom_path, paths[p].copyfrom_rev - # if you commit to a tag, I'm calling you stupid and ignoring - # you. - if src_p is not None and src_rev is not None: - file, branch = self._path_and_branch_for_path(src_p) - if file is None: - # some crazy people make tags from other tags - file = '' - from_tag = self._is_path_tag(src_p) - if not from_tag: - continue - branch, src_rev = self.tags[from_tag] - if t_name not in added_tags and file is '': - added_tags[t_name] = branch, src_rev - elif file: - t_name = t_name[:-(len(file)+1)] - if src_rev > added_tags[t_name][1]: - added_tags[t_name] = branch, src_rev - elif (paths[p].action == 'D' and p.endswith(t_name) - and t_name in self.tags): - tags_to_delete.add(t_name) - continue - # At this point we know the path is not a tag. In that - # case, we only care if it is the root of a new branch (in - # this function). This is determined by the following - # checks: - # 1. Is the file located inside any currently known - # branch? If yes, then we're done with it, this isn't - # interesting. - # 2. Does the file have copyfrom information? If yes, then - # we're done: this is a new branch, and we record the - # copyfrom in added_branches if it comes from the root - # of another branch, or create it from scratch. - # 3. Neither of the above. This could be a branch, but it - # might never work out for us. It's only ever a branch - # (as far as we're concerned) if it gets committed to, - # which we have to detect at file-write time anyway. So - # we do nothing here. - # 4. It's the root of an already-known branch, with an - # action of 'D'. We mark the branch as deleted. - # 5. It's the parent directory of one or more - # already-known branches, so we mark them as deleted. - # 6. It's a branch being replaced by another branch - the - # action will be 'R'. 
- fi, br = self._path_and_branch_for_path(p) - if fi is not None: - if fi == '': - if paths[p].action == 'D': - self.branches_to_delete.add(br) # case 4 - elif paths[p].action == 'R': - parent = self._determine_parent_branch( - p, paths[p].copyfrom_path, paths[p].copyfrom_rev, - revision.revnum) - added_branches.update(parent) - continue # case 1 - if paths[p].action == 'D': - for known in self.branches: - if self._svnpath(known).startswith(p): - self.branches_to_delete.add(known) # case 5 - parent = self._determine_parent_branch( - p, paths[p].copyfrom_path, paths[p].copyfrom_rev, revision.revnum) - if not parent and paths[p].copyfrom_path: - bpath, branch = self._path_and_branch_for_path(p, False) - if (bpath is not None - and branch not in self.branches - and branch not in added_branches): - parent = {branch: (None, 0, revision.revnum)} - added_branches.update(parent) - rmtags = dict((t, self.tags[t][0]) for t in tags_to_delete) - return { - 'tags': (added_tags, rmtags), - 'branches': (added_branches, self.branches_to_delete), - } - - def save_tbdelta(self, tbdelta): - for t in tbdelta['tags'][1]: - del self.tags[t] - for br in tbdelta['branches'][1]: - del self.branches[br] - for t, info in tbdelta['tags'][0].items(): - self.ui.status('Tagged %s@%s as %s\n' % - (info[0] or 'trunk', info[1], t)) - self.tags.update(tbdelta['tags'][0]) - self.branches.update(tbdelta['branches'][0]) - - def _updateexternals(self): - if not self.externals: + if not self.missing: return - # Accumulate externals records for all branches - revnum = self.current_rev.revnum - branches = {} - for path, entry in self.externals.iteritems(): - if not self._is_path_valid(path): - self.ui.warn('WARNING: Invalid path %s in externals\n' % path) - continue - p, b, bp = self._split_branch_path(path) - if bp not in branches: - external = svnexternals.externalsfile() - parent = self.get_parent_revision(revnum, b) - pctx = self.repo[parent] - if '.hgsvnexternals' in pctx: - external.read(pctx['.hgsvnexternals'].data()) - branches[bp] = external - else: - external = branches[bp] - external[p] = entry - - # Register the file changes - for bp, external in branches.iteritems(): - path = bp + '/.hgsvnexternals' - if external: - self.set_file(path, external.write(), False, False) - else: - self.delete_file(path) - - def branchedits(self, branch, rev): - check = lambda x: x[0][1] == branch and x[0][0] < rev.revnum - return sorted(filter(check, self.revmap.iteritems()), reverse=True) - - def committags(self, delta, rev, endbranches): - - date = self.fixdate(rev.date) - # determine additions/deletions per branch - branches = {} - for tag, source in delta[0].iteritems(): - b, r = source - branches.setdefault(b, []).append(('add', tag, r)) - for tag, branch in delta[1].iteritems(): - branches.setdefault(branch, []).append(('rm', tag, None)) - - for b, tags in branches.iteritems(): - - # modify parent's .hgtags source - parent = self.repo[{None: 'default'}.get(b, b)] - if '.hgtags' not in parent: - src = '' - else: - src = parent['.hgtags'].data() - for op, tag, r in sorted(tags, reverse=True): - if op == 'add': - tagged = node.hex(self.revmap[ - self.get_parent_svn_branch_and_rev(r+1, b)]) - elif op == 'rm': - tagged = node.hex(node.nullid) - src += '%s %s\n' % (tagged, tag) - - # add new changeset containing updated .hgtags - def fctxfun(repo, memctx, path): - return context.memfilectx(path='.hgtags', data=src, - islink=False, isexec=False, - copied=None) - extra = util.build_extra(rev.revnum, b, self.uuid, self.subdir) - ctx = 
context.memctx(self.repo, - (parent.node(), node.nullid), - rev.message or ' ', - ['.hgtags'], - fctxfun, - self.authors[rev.author], - date, - extra) - new = self.repo.commitctx(ctx) - if (rev.revnum, b) not in self.revmap: - self.add_to_revmap(rev.revnum, b, new) - if b in endbranches: - endbranches[b] = new - - def commit_current_delta(self, tbdelta): - if hasattr(self, '_exception_info'): #pragma: no cover - traceback.print_exception(*self._exception_info) - raise ReplayException() - if self.missing_plaintexts: - raise MissingPlainTextError() - self._updateexternals() - # paranoidly generate the list of files to commit - files_to_commit = set(self.current_files.keys()) - files_to_commit.update(self.current_files_symlink.keys()) - files_to_commit.update(self.current_files_exec.keys()) - files_to_commit.update(self.deleted_files.keys()) - # back to a list and sort so we get sane behavior - files_to_commit = list(files_to_commit) - files_to_commit.sort() - branch_batches = {} - rev = self.current_rev - date = self.fixdate(rev.date) - - # build up the branches that have files on them - for f in files_to_commit: - if not self._is_path_valid(f): - continue - p, b = self._path_and_branch_for_path(f) - if b not in branch_batches: - branch_batches[b] = [] - branch_batches[b].append((p, f)) - - closebranches = {} - for branch in tbdelta['branches'][1]: - branchedits = self.branchedits(branch, rev) - if len(branchedits) < 1: - # can't close a branch that never existed - continue - ha = branchedits[0][1] - closebranches[branch] = ha - - # 1. handle normal commits - closedrevs = closebranches.values() - for branch, files in branch_batches.iteritems(): - if branch in self.commit_branches_empty and files: - del self.commit_branches_empty[branch] - files = dict(files) - - parents = (self.get_parent_revision(rev.revnum, branch), - revlog.nullid) - if parents[0] in closedrevs and branch in self.branches_to_delete: - continue - extra = util.build_extra(rev.revnum, branch, self.uuid, self.subdir) - if branch is not None: - if (branch not in self.branches - and branch not in self.repo.branchtags()): - continue - parent_ctx = self.repo.changectx(parents[0]) - if '.hgsvnexternals' not in parent_ctx and '.hgsvnexternals' in files: - # Do not register empty externals files - if (files['.hgsvnexternals'] in self.current_files - and not self.current_files[files['.hgsvnexternals']]): - del files['.hgsvnexternals'] - - def filectxfn(repo, memctx, path): - current_file = files[path] - if current_file in self.deleted_files: - raise IOError() - copied = self.copies.get(current_file) - flags = parent_ctx.flags(path) - is_exec = self.current_files_exec.get(current_file, 'x' in flags) - is_link = self.current_files_symlink.get(current_file, 'l' in flags) - if current_file in self.current_files: - data = self.current_files[current_file] - if is_link and data.startswith('link '): - data = data[len('link '):] - elif is_link: - self.ui.warn('file marked as link, but contains data: ' - '%s (%r)\n' % (current_file, flags)) - else: - data = parent_ctx.filectx(path).data() - return context.memfilectx(path=path, - data=data, - islink=is_link, isexec=is_exec, - copied=copied) - if not self.usebranchnames: - extra.pop('branch', None) - current_ctx = context.memctx(self.repo, - parents, - rev.message or '...', - files.keys(), - filectxfn, - self.authors[rev.author], - date, - extra) - new_hash = self.repo.commitctx(current_ctx) - util.describe_commit(self.ui, new_hash, branch) - if (rev.revnum, branch) not in self.revmap: - 
self.add_to_revmap(rev.revnum, branch, new_hash) - - # 2. handle branches that need to be committed without any files - for branch in self.commit_branches_empty: - ha = self.get_parent_revision(rev.revnum, branch) - if ha == node.nullid: - continue - parent_ctx = self.repo.changectx(ha) - def del_all_files(*args): - raise IOError - # True here meant nuke all files, shouldn't happen with branch closing - if self.commit_branches_empty[branch]: #pragma: no cover - raise hgutil.Abort('Empty commit to an open branch attempted. ' - 'Please report this issue.') - extra = util.build_extra(rev.revnum, branch, self.uuid, self.subdir) - if not self.usebranchnames: - extra.pop('branch', None) - current_ctx = context.memctx(self.repo, - (ha, node.nullid), - rev.message or ' ', - [], - del_all_files, - self.authors[rev.author], - date, - extra) - new_hash = self.repo.commitctx(current_ctx) - util.describe_commit(self.ui, new_hash, branch) - if (rev.revnum, branch) not in self.revmap: - self.add_to_revmap(rev.revnum, branch, new_hash) - - # 3. handle tags - if tbdelta['tags'][0] or tbdelta['tags'][1]: - self.committags(tbdelta['tags'], rev, closebranches) - - # 4. close any branches that need it - for branch in tbdelta['branches'][1]: - # self.get_parent_revision(rev.revnum, branch) - ha = closebranches.get(branch) - if ha is None: - continue - self.delbranch(branch, ha, rev) - - self._save_metadata() - self.clear_current_info() - - def delbranch(self, branch, node, rev): - pctx = self.repo[node] - def filectxfun(repo, memctx, path): - return pctx[path] - files = pctx.manifest().keys() - extra = {'close': 1} - if self.usebranchnames: - extra['branch'] = branch or 'default' - ctx = context.memctx(self.repo, - (node, revlog.nullid), - rev.message or util.default_commit_msg, - files, - filectxfun, - self.authors[rev.author], - self.fixdate(rev.date), - extra) - new = self.repo.commitctx(ctx) - self.ui.status('Marked branch %s as closed.\n' % (branch or 'default')) - def readfilemap(self, filemapfile): - self.ui.note( - ('Reading file map from %s\n') - % filemapfile) - def addpathtomap(path, mapping, mapname): - if path in mapping: - self.ui.warn(('Duplicate %s entry in %s: "%d"\n') % - (mapname, filemapfile, path)) + msg = 'fetching %s files that could not use replay.\n' + self.ui.debug(msg % len(self.missing)) + root = svn.subdir and svn.subdir[1:] or '' + r = self.rev.revnum + + files = set() + for p in self.missing: + self.ui.note('.') + self.ui.flush() + if p[-1] == '/': + dir = p[len(root):] + new = [dir + f for f, k in svn.list_files(dir, r) if k == 'f'] + files.update(new) else: - self.ui.debug(('%sing %s\n') % - (mapname.capitalize().strip('e'), path)) - mapping[path] = path - - f = open(filemapfile, 'r') - for line in f: - if line.strip() == '' or line.strip()[0] == '#': - continue - try: - cmd, path = line.split(' ', 1) - cmd = cmd.strip() - path = path.strip() - if cmd == 'include': - addpathtomap(path, self.includepaths, 'include') - elif cmd == 'exclude': - addpathtomap(path, self.excludepaths, 'exclude') - else: - self.ui.warn( - ('Unknown filemap command %s\n') - % cmd) - except IndexError: - self.ui.warn( - ('Ignoring bad line in filemap %s: %s\n') - % (filemapfile, line.rstrip())) - f.close() - - def meta_data_dir(self): - return os.path.join(self.path, '.hg', 'svn') - meta_data_dir = property(meta_data_dir) - - def meta_file_named(self, name): - return os.path.join(self.meta_data_dir, name) - - def revmap_file(self): - return self.meta_file_named('rev_map') - revmap_file = 
property(revmap_file) - - def _get_uuid(self): - return open(self.meta_file_named('uuid')).read() - - def _set_uuid(self, uuid): - if not uuid: - return self._get_uuid() - elif os.path.isfile(self.meta_file_named('uuid')): - stored_uuid = self._get_uuid() - assert stored_uuid - if uuid != stored_uuid: - raise hgutil.Abort('unable to operate on unrelated repository') - else: - return stored_uuid - else: - if uuid: - f = open(self.meta_file_named('uuid'), 'w') - f.write(uuid) - f.flush() - f.close() - return self._get_uuid() - else: - raise hgutil.Abort('unable to operate on unrelated repository') - - uuid = property(_get_uuid, _set_uuid, None, - 'Error-checked UUID of source Subversion repository.') - - def branch_info_file(self): - return self.meta_file_named('branch_info') - branch_info_file = property(branch_info_file) - - def tag_locations_file(self): - return self.meta_file_named('tag_locations') - tag_locations_file = property(tag_locations_file) + files.add(p[len(root):]) - def authors_file(self): - return self.meta_file_named('authors') - authors_file = property(authors_file) + i = 1 + self.ui.note('\nfetching files...\n') + for p in files: + self.ui.note('.') + self.ui.flush() + if i % 50 == 0: + svn.init_ra_and_client() + i += 1 + data, mode = svn.get_file(p, r) + self.set(p, data, 'x' in mode, 'l' in mode) - def aresamefiles(self, parentctx, childctx, files): - """Assuming all files exist in childctx and parentctx, return True - if none of them was changed in-between. - """ - if parentctx == childctx: - return True - if parentctx.rev() > childctx.rev(): - parentctx, childctx = childctx, parentctx + self.missing = set() + self.ui.note('\n') - def selfandancestors(selfctx): - yield selfctx - for ctx in selfctx.ancestors(): - yield ctx - files = dict.fromkeys(files) - for pctx in selfandancestors(childctx): - if pctx.rev() <= parentctx.rev(): - return True - for f in pctx.files(): - if f in files: - return False - # parentctx is not an ancestor of childctx, files are unrelated - return False +class HgEditor(delta.Editor): - # Here come all the actual editor methods + def __init__(self, meta): + self.meta = meta + self.ui = meta.ui + self.repo = meta.repo + self.current = RevisionData(meta.ui) @ieditor def delete_entry(self, path, revision_bogus, parent_baton, pool=None): - br_path, branch = self._path_and_branch_for_path(path) + br_path, branch = self.meta.split_branch_path(path)[:2] if br_path == '': - self.branches_to_delete.add(branch) + self.meta.closebranches.add(branch) if br_path is not None: - ha = self.get_parent_revision(self.current_rev.revnum, branch) + ha = self.meta.get_parent_revision(self.current.rev.revnum, branch) if ha == revlog.nullid: return ctx = self.repo.changectx(ha) @@ -879,122 +136,124 @@ class HgChangeReceiver(delta.Editor): if br_path != '': br_path2 = br_path + '/' # assuming it is a directory - self.externals[path] = None - map(self.delete_file, [pat for pat in self.current_files.iterkeys() - if pat.startswith(path+'/')]) + self.current.externals[path] = None + map(self.current.delete, [pat for pat in self.current.files.iterkeys() + if pat.startswith(path+'/')]) for f in ctx.walk(util.PrefixMatch(br_path2)): f_p = '%s/%s' % (path, f[len(br_path2):]) - if f_p not in self.current_files: - self.delete_file(f_p) - self.delete_file(path) + if f_p not in self.current.files: + self.current.delete(f_p) + self.current.delete(path) @ieditor def open_file(self, path, parent_baton, base_revision, p=None): - self.current_file = None - fpath, branch = 
self._path_and_branch_for_path(path) + self.current.file = None + fpath, branch = self.meta.split_branch_path(path)[:2] if not fpath: self.ui.debug('WARNING: Opening non-existant file %s\n' % path) return - self.current_file = path + self.current.file = path self.ui.note('M %s\n' % path) if base_revision != -1: - self.base_revision = base_revision + self.current.base = base_revision else: - self.base_revision = None + self.current.base = None - if self.current_file in self.current_files: + if self.current.file in self.current.files: return baserev = base_revision if baserev is None or baserev == -1: - baserev = self.current_rev.revnum - 1 - parent = self.get_parent_revision(baserev + 1, branch) + baserev = self.current.rev.revnum - 1 + parent = self.meta.get_parent_revision(baserev + 1, branch) ctx = self.repo[parent] - if not self._is_path_valid(path): + if not self.meta.is_path_valid(path): return if fpath not in ctx: - self.missing_plaintexts.add(path) + self.current.missing.add(path) fctx = ctx.filectx(fpath) base = fctx.data() if 'l' in fctx.flags(): base = 'link ' + base - self.set_file(path, base, 'x' in fctx.flags(), 'l' in fctx.flags()) + self.current.set(path, base, 'x' in fctx.flags(), 'l' in fctx.flags()) @ieditor def add_file(self, path, parent_baton=None, copyfrom_path=None, copyfrom_revision=None, file_pool=None): - self.current_file = None - self.base_revision = None - if path in self.deleted_files: - del self.deleted_files[path] - fpath, branch = self._path_and_branch_for_path(path, existing=False) + self.current.file = None + self.current.base = None + if path in self.current.deleted: + del self.current.deleted[path] + fpath, branch = self.meta.split_branch_path(path, existing=False)[:2] if not fpath: return - if branch not in self.branches: + if (branch not in self.meta.branches and + not self.meta.is_path_tag(self.meta.remotename(branch))): # we know this branch will exist now, because it has at least one file. Rock. 
- self.branches[branch] = None, 0, self.current_rev.revnum - self.current_file = path + self.meta.branches[branch] = None, 0, self.current.rev.revnum + self.current.file = path if not copyfrom_path: self.ui.note('A %s\n' % path) - self.set_file(path, '', False, False) + self.current.set(path, '', False, False) return self.ui.note('A+ %s\n' % path) (from_file, - from_branch) = self._path_and_branch_for_path(copyfrom_path) + from_branch) = self.meta.split_branch_path(copyfrom_path)[:2] if not from_file: - self.missing_plaintexts.add(path) + self.current.missing.add(path) return - ha = self.get_parent_revision(copyfrom_revision + 1, - from_branch) + ha = self.meta.get_parent_revision(copyfrom_revision + 1, + from_branch) ctx = self.repo.changectx(ha) if from_file in ctx: fctx = ctx.filectx(from_file) flags = fctx.flags() - self.set_file(path, fctx.data(), 'x' in flags, 'l' in flags) + self.current.set(path, fctx.data(), 'x' in flags, 'l' in flags) if from_branch == branch: - parentid = self.get_parent_revision(self.current_rev.revnum, - branch) + parentid = self.meta.get_parent_revision(self.current.rev.revnum, + branch) if parentid != revlog.nullid: parentctx = self.repo.changectx(parentid) - if self.aresamefiles(parentctx, ctx, [from_file]): - self.copies[path] = from_file + if util.aresamefiles(parentctx, ctx, [from_file]): + self.current.copies[path] = from_file @ieditor def add_directory(self, path, parent_baton, copyfrom_path, copyfrom_revision, dir_pool=None): - self.dir_batons[path] = path - br_path, branch = self._path_and_branch_for_path(path) + self.current.batons[path] = path + br_path, branch = self.meta.split_branch_path(path)[:2] if br_path is not None: if not copyfrom_path and not br_path: - self.commit_branches_empty[branch] = True + self.current.emptybranches[branch] = True else: - self.commit_branches_empty[branch] = False + self.current.emptybranches[branch] = False if br_path is None or not copyfrom_path: return path if copyfrom_path: - tag = self._is_path_tag(copyfrom_path) - if tag not in self.tags: + tag = self.meta.is_path_tag(copyfrom_path) + if tag not in self.meta.tags: tag = None - if not self._is_path_valid(copyfrom_path) and not tag: - self.missing_plaintexts.add('%s/' % path) + if not self.meta.is_path_valid(copyfrom_path) and not tag: + self.current.missing.add('%s/' % path) return path if tag: - source_branch, source_rev = self.tags[tag] + ci = self.meta.repo[self.meta.tags[tag]].extra()['convert_revision'] + source_rev, source_branch, = self.meta.parse_converted_revision(ci) cp_f = '' else: source_rev = copyfrom_revision - cp_f, source_branch = self._path_and_branch_for_path(copyfrom_path) + cp_f, source_branch = self.meta.split_branch_path(copyfrom_path)[:2] if cp_f == '' and br_path == '': assert br_path is not None - self.branches[branch] = source_branch, source_rev, self.current_rev.revnum - new_hash = self.get_parent_revision(source_rev + 1, - source_branch) + tmp = source_branch, source_rev, self.current.rev.revnum + self.meta.branches[branch] = tmp + new_hash = self.meta.get_parent_revision(source_rev + 1, source_branch) if new_hash == node.nullid: - self.missing_plaintexts.add('%s/' % path) + self.current.missing.add('%s/' % path) return path cp_f_ctx = self.repo.changectx(new_hash) if cp_f != '/' and cp_f != '': @@ -1008,65 +267,65 @@ class HgChangeReceiver(delta.Editor): f2 = f[len(cp_f):] fctx = cp_f_ctx.filectx(f) fp_c = path + '/' + f2 - self.set_file(fp_c, fctx.data(), 'x' in fctx.flags(), 'l' in fctx.flags()) - if fp_c in self.deleted_files: - 
del self.deleted_files[fp_c] + self.current.set(fp_c, fctx.data(), 'x' in fctx.flags(), 'l' in fctx.flags()) + if fp_c in self.current.deleted: + del self.current.deleted[fp_c] if branch == source_branch: copies[fp_c] = f if copies: # Preserve the directory copy records if no file was changed between # the source and destination revisions, or discard it completely. - parentid = self.get_parent_revision(self.current_rev.revnum, branch) + parentid = self.meta.get_parent_revision(self.current.rev.revnum, branch) if parentid != revlog.nullid: parentctx = self.repo.changectx(parentid) - if self.aresamefiles(parentctx, cp_f_ctx, copies.values()): - self.copies.update(copies) + if util.aresamefiles(parentctx, cp_f_ctx, copies.values()): + self.current.copies.update(copies) return path @ieditor def change_file_prop(self, file_baton, name, value, pool=None): if name == 'svn:executable': - self.current_files_exec[self.current_file] = bool(value is not None) + self.current.execfiles[self.current.file] = bool(value is not None) elif name == 'svn:special': - self.current_files_symlink[self.current_file] = bool(value is not None) + self.current.symlinks[self.current.file] = bool(value is not None) @ieditor def change_dir_prop(self, dir_baton, name, value, pool=None): if dir_baton is None: return - path = self.dir_batons[dir_baton] + path = self.current.batons[dir_baton] if name == 'svn:externals': - self.externals[path] = value + self.current.externals[path] = value @ieditor def open_directory(self, path, parent_baton, base_revision, dir_pool=None): - self.dir_batons[path] = path - p_, branch = self._path_and_branch_for_path(path) + self.current.batons[path] = path + p_, branch = self.meta.split_branch_path(path)[:2] if p_ == '': - self.commit_branches_empty[branch] = False + self.current.emptybranches[branch] = False return path @ieditor def close_directory(self, dir_baton, dir_pool=None): if dir_baton is not None: - del self.dir_batons[dir_baton] + del self.current.batons[dir_baton] @ieditor def apply_textdelta(self, file_baton, base_checksum, pool=None): # We know coming in here the file must be one of the following options: # 1) Deleted (invalid, fail an assertion) # 2) Missing a base text (bail quick since we have to fetch a full plaintext) - # 3) Has a base text in self.current_files, apply deltas + # 3) Has a base text in self.current.files, apply deltas base = '' - if not self._is_path_valid(self.current_file): + if not self.meta.is_path_valid(self.current.file): return lambda x: None - assert self.current_file not in self.deleted_files, ( - 'Cannot apply_textdelta to a deleted file: %s' % self.current_file) - assert (self.current_file in self.current_files - or self.current_file in self.missing_plaintexts), '%s not found' % self.current_file - if self.current_file in self.missing_plaintexts: + assert self.current.file not in self.current.deleted, ( + 'Cannot apply_textdelta to a deleted file: %s' % self.current.file) + assert (self.current.file in self.current.files + or self.current.file in self.current.missing), '%s not found' % self.current.file + if self.current.file in self.current.missing: return lambda x: None - base = self.current_files[self.current_file] + base = self.current.files[self.current.file] source = cStringIO.StringIO(base) target = cStringIO.StringIO() self.stream = target @@ -1077,19 +336,19 @@ class HgChangeReceiver(delta.Editor): 'cannot call handler!') def txdelt_window(window): try: - if not self._is_path_valid(self.current_file): + if not 
self.meta.is_path_valid(self.current.file): return handler(window, baton) # window being None means commit this file if not window: - self.current_files[self.current_file] = target.getvalue() + self.current.files[self.current.file] = target.getvalue() except core.SubversionException, e: #pragma: no cover if e.apr_err == core.SVN_ERR_INCOMPLETE_DATA: - self.missing_plaintexts.add(self.current_file) + self.current.missing.add(self.current.file) else: #pragma: no cover raise hgutil.Abort(*e.args) except: #pragma: no cover - print len(base), self.current_file + print len(base), self.current.file self._exception_info = sys.exc_info() raise return txdelt_window diff --git a/hgsubversion/maps.py b/hgsubversion/maps.py --- a/hgsubversion/maps.py +++ b/hgsubversion/maps.py @@ -2,6 +2,9 @@ import os from mercurial import util as hgutil +from mercurial import node + +import svncommands class AuthorMap(dict): '''A mapping from Subversion-style authors to Mercurial-style @@ -33,10 +36,18 @@ class AuthorMap(dict): ''' Load mappings from a file at the specified path. ''' if not os.path.exists(path): return + + writing = False + if path != self.path: + writing = open(self.path, 'a') + self.ui.note('reading authormap from %s\n' % path) f = open(path, 'r') for number, line in enumerate(f): + if writing: + writing.write(line) + line = line.split('#')[0] if not line.strip(): continue @@ -50,23 +61,16 @@ class AuthorMap(dict): src = src.strip() dst = dst.strip() + self.ui.debug('adding author %s to author map\n' % src) if src in self and dst != self[src]: msg = 'overriding author: "%s" to "%s" (%s)\n' self.ui.warn(msg % (self[src], dst, src)) - else: - self[src] = dst - - f.close() + self[src] = dst - def __setitem__(self, key, value): - ''' Similar to dict.__setitem__, but also updates the new mapping in the - backing store. ''' - self.super.__setitem__(key, value) - self.ui.debug('adding author %s to author map\n' % self.path) - f = open(self.path, 'w+') - for k, v in self.iteritems(): - f.write("%s=%s\n" % (k, v)) f.close() + if writing: + writing.flush() + writing.close() def __getitem__(self, author): ''' Similar to dict.__getitem__, except in case of an unknown author. @@ -91,3 +95,192 @@ class AuthorMap(dict): else: # Mercurial incorrectly splits at e.g. '.', so we roll our own. 
return author.rsplit('@', 1)[0] + + +class TagMap(dict): + + VERSION = 2 + + @classmethod + def filepath(cls, repo): + return os.path.join(repo.path, 'svn', 'tagmap') + + def __init__(self, repo, endrev=None): + dict.__init__(self) + self.path = self.filepath(repo) + self.seen = 0 + self.endrev=endrev + if os.path.isfile(self.path): + self._load(repo) + else: + self._write() + + def _load(self, repo): + f = open(self.path) + ver = int(f.readline()) + if ver < self.VERSION: + repo.ui.warn('tag map outdated, running rebuildmeta...\n') + f.close() + os.unlink(self.path) + svncommands.rebuildmeta(repo.ui, repo, os.path.dirname(repo.path), ()) + return + elif ver != self.VERSION: + print 'tagmap too new -- please upgrade' + raise NotImplementedError + for l in f: + hash, revision, tag = l.split(' ', 2) + revision = int(revision) + tag = tag[:-1] + if self.endrev is not None and revision > self.endrev: + break + dict.__setitem__(self, tag, node.bin(hash)) + f.close() + + def _write(self): + assert self.endrev is None + f = open(self.path, 'w') + f.write('%s\n' % self.VERSION) + f.flush() + f.close() + + def update(self, other): + for k,v in other.iteritems(): + self[k] = v + + def __contains__(self, tag): + return dict.__contains__(self, tag) and dict.__getitem__(self, tag) != node.nullid + + def __getitem__(self, tag): + if tag in self: + return dict.__getitem__(self, tag) + raise KeyError() + + def __setitem__(self, tag, info): + hash, revision = info + f = open(self.path, 'a') + f.write('%s %s %s\n' % (node.hex(hash), revision, tag)) + f.flush() + f.close() + dict.__setitem__(self, tag, hash) + + +class RevMap(dict): + + VERSION = 1 + + def __init__(self, repo): + dict.__init__(self) + self.path = os.path.join(repo.path, 'svn', 'rev_map') + self.seen = 0 + if os.path.isfile(self.path): + self._load() + else: + self._write() + + def hashes(self): + return dict((v, k) for (k, v) in self.iteritems()) + + def branchedits(self, branch, rev): + check = lambda x: x[0][1] == branch and x[0][0] < rev.revnum + return sorted(filter(check, self.iteritems()), reverse=True) + + def _load(self): + f = open(self.path) + ver = int(f.readline()) + if ver != self.VERSION: + print 'revmap too new -- please upgrade' + raise NotImplementedError + for l in f: + revnum, hash, branch = l.split(' ', 2) + if branch == '\n': + branch = None + else: + branch = branch[:-1] + revnum = int(revnum) + self.seen = max(self.seen, revnum) + dict.__setitem__(self, (revnum, branch), node.bin(hash)) + f.close() + + def _write(self): + f = open(self.path, 'w') + f.write('%s\n' % self.VERSION) + f.flush() + f.close() + + def __setitem__(self, key, hash): + revnum, branch = key + f = open(self.path, 'a') + b = branch or '' + f.write(str(revnum) + ' ' + node.hex(hash) + ' ' + b + '\n') + f.flush() + f.close() + self.seen = max(self.seen, revnum) + dict.__setitem__(self, (revnum, branch), hash) + + +class FileMap(object): + + def __init__(self, repo): + self.ui = repo.ui + self.include = {} + self.exclude = {} + filemap = repo.ui.config('hgsubversion', 'filemap') + if filemap and os.path.exists(filemap): + self.load(filemap) + + def _rpairs(self, name): + yield '.', name + e = len(name) + while e != -1: + yield name[:e], name[e+1:] + e = name.rfind('/', 0, e) + + def check(self, map, path): + map = getattr(self, map) + for pre, suf in self._rpairs(path): + if pre not in map: + continue + return map[pre] + return None + + def __contains__(self, path): + if len(self.include) and len(path): + inc = self.check('include', path) + else: + inc 
= path + if len(self.exclude) and len(path): + exc = self.check('exclude', path) + else: + exc = None + if inc is None or exc is not None: + return False + return True + + def add(self, fn, map, path): + mapping = getattr(self, map) + if path in mapping: + msg = 'duplicate %s entry in %s: "%d"\n' + self.ui.warn(msg % (map, fn, path)) + return + bits = map.strip('e'), path + self.ui.debug('%sing %s\n' % bits) + mapping[path] = path + + def load(self, fn): + self.ui.note('reading file map from %s\n' % fn) + f = open(fn, 'r') + for line in f: + if line.strip() == '' or line.strip()[0] == '#': + continue + try: + cmd, path = line.split(' ', 1) + cmd = cmd.strip() + path = path.strip() + if cmd in ('include', 'exclude'): + self.add(fn, cmd, path) + continue + self.ui.warn('unknown filemap command %s\n' % cmd) + except IndexError: + msg = 'ignoring bad line in filemap %s: %s\n' + self.ui.warn(msg % (fn, line.rstrip())) + f.close() diff --git a/hgsubversion/cmdutil.py b/hgsubversion/pushmod.py rename from hgsubversion/cmdutil.py rename to hgsubversion/pushmod.py --- a/hgsubversion/cmdutil.py +++ b/hgsubversion/pushmod.py @@ -1,93 +1,17 @@ #!/usr/bin/python -import re -import os -import urllib from mercurial import util as hgutil from svn import core -import util import svnwrap import svnexternals -b_re = re.compile(r'^\+\+\+ b\/([^\n]*)', re.MULTILINE) -a_re = re.compile(r'^--- a\/([^\n]*)', re.MULTILINE) -devnull_re = re.compile(r'^([-+]{3}) /dev/null', re.MULTILINE) -header_re = re.compile(r'^diff --git .* b\/(.*)', re.MULTILINE) -newfile_devnull_re = re.compile(r'^--- /dev/null\n\+\+\+ b/([^\n]*)', - re.MULTILINE) - - class NoFilesException(Exception): """Exception raised when you try and commit without files. """ -def formatrev(rev): - if rev == -1: - return '\t(working copy)' - return '\t(revision %d)' % rev - - -def filterdiff(diff, oldrev, newrev): - diff = newfile_devnull_re.sub(r'--- \1\t(revision 0)' '\n' - r'+++ \1\t(working copy)', - diff) - oldrev = formatrev(oldrev) - newrev = formatrev(newrev) - diff = a_re.sub(r'--- \1'+ oldrev, diff) - diff = b_re.sub(r'+++ \1' + newrev, diff) - diff = devnull_re.sub(r'\1 /dev/null\t(working copy)', diff) - diff = header_re.sub(r'Index: \1' + '\n' + ('=' * 67), diff) - return diff - - -def parentrev(ui, repo, hge, svn_commit_hashes): - """Find the svn parent revision of the repo's dirstate. 
- """ - workingctx = repo.parents()[0] - outrev = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, - workingctx.node()) - if outrev: - workingctx = repo[outrev[-1]].parents()[0] - return workingctx - - -def replay_convert_rev(ui, hg_editor, svn, r, tbdelta): - # ui is only passed in for similarity with stupid.convert_rev() - hg_editor.set_current_rev(r) - hg_editor.save_tbdelta(tbdelta) # needed by get_replay() - svn.get_replay(r.revnum, hg_editor) - i = 1 - if hg_editor.missing_plaintexts: - hg_editor.ui.debug('Fetching %s files that could not use replay.\n' % - len(hg_editor.missing_plaintexts)) - files_to_grab = set() - rootpath = svn.subdir and svn.subdir[1:] or '' - for p in hg_editor.missing_plaintexts: - hg_editor.ui.note('.') - hg_editor.ui.flush() - if p[-1] == '/': - dirpath = p[len(rootpath):] - files_to_grab.update([dirpath + f for f,k in - svn.list_files(dirpath, r.revnum) - if k == 'f']) - else: - files_to_grab.add(p[len(rootpath):]) - hg_editor.ui.note('\nFetching files...\n') - for p in files_to_grab: - hg_editor.ui.note('.') - hg_editor.ui.flush() - if i % 50 == 0: - svn.init_ra_and_client() - i += 1 - data, mode = svn.get_file(p, r.revnum) - hg_editor.set_file(p, data, 'x' in mode, 'l' in mode) - hg_editor.missing_plaintexts = set() - hg_editor.ui.note('\n') - hg_editor.commit_current_delta(tbdelta) - def _isdir(svn, branchpath, svndir): try: @@ -168,12 +92,10 @@ def _externals(ctx): return ext -def commit_from_rev(ui, repo, rev_ctx, hg_editor, svn_url, base_revision, - username, password): +def commit(ui, repo, rev_ctx, meta, base_revision, svn): """Build and send a commit from Mercurial to Subversion. """ file_data = {} - svn = svnwrap.SubversionRepo(svn_url, username, password) parent = rev_ctx.parents()[0] parent_branch = rev_ctx.parents()[0].branch() branch_path = 'trunk' @@ -284,24 +206,3 @@ def commit_from_rev(ui, repo, rev_ctx, h raise return True - -def islocalrepo(url): - if not url.startswith('file:///'): - return False - if '#' in url.split('/')[-1]: # strip off #anchor - url = url[:url.rfind('#')] - path = url[len('file://'):] - path = urllib.url2pathname(path).replace(os.sep, '/') - while '/' in path: - if reduce(lambda x,y: x and y, - map(lambda p: os.path.exists(os.path.join(path, p)), - ('hooks', 'format', 'db', ))): - return True - path = path.rsplit('/', 1)[0] - return False - -def issvnurl(url): - for scheme in ('svn', 'http', 'https', 'svn+ssh'): - if url.startswith(scheme + '://'): - return True - return islocalrepo(url) diff --git a/hgsubversion/replay.py b/hgsubversion/replay.py new file mode 100644 --- /dev/null +++ b/hgsubversion/replay.py @@ -0,0 +1,209 @@ +import traceback + +from mercurial import revlog +from mercurial import node +from mercurial import context +from mercurial import util as hgutil + +import svnexternals +import util + + +class MissingPlainTextError(Exception): + """Exception raised when the repo lacks a source file required for replaying + a txdelta. + """ + +class ReplayException(Exception): + """Exception raised when you try and commit but the replay encountered an + exception. 
+ """ + +def convert_rev(ui, meta, svn, r, tbdelta): + + editor = meta.editor + editor.current.clear() + editor.current.rev = r + svn.get_replay(r.revnum, editor) + current = editor.current + current.findmissing(svn) + + # update externals + + if current.externals: + + # accumulate externals records for all branches + revnum = current.rev.revnum + branches = {} + for path, entry in current.externals.iteritems(): + + if not meta.is_path_valid(path): + ui.warn('WARNING: Invalid path %s in externals\n' % path) + continue + + p, b, bp = meta.split_branch_path(path) + if bp not in branches: + external = svnexternals.externalsfile() + parent = meta.get_parent_revision(revnum, b) + pctx = meta.repo[parent] + if '.hgsvnexternals' in pctx: + external.read(pctx['.hgsvnexternals'].data()) + branches[bp] = external + else: + external = branches[bp] + + external[p] = entry + + # register externals file changes + for bp, external in branches.iteritems(): + path = bp + '/.hgsvnexternals' + if external: + current.set(path, external.write(), False, False) + else: + current.delete(path) + + if current.exception is not None: #pragma: no cover + traceback.print_exception(*current.exception) + raise ReplayException() + if current.missing: + raise MissingPlainTextError() + + # paranoidly generate the list of files to commit + files_to_commit = set(current.files.keys()) + files_to_commit.update(current.symlinks.keys()) + files_to_commit.update(current.execfiles.keys()) + files_to_commit.update(current.deleted.keys()) + # back to a list and sort so we get sane behavior + files_to_commit = list(files_to_commit) + files_to_commit.sort() + branch_batches = {} + rev = current.rev + date = meta.fixdate(rev.date) + + # build up the branches that have files on them + for f in files_to_commit: + if not meta.is_path_valid(f): + continue + p, b = meta.split_branch_path(f)[:2] + if b not in branch_batches: + branch_batches[b] = [] + branch_batches[b].append((p, f)) + + closebranches = {} + for branch in tbdelta['branches'][1]: + branchedits = meta.revmap.branchedits(branch, rev) + if len(branchedits) < 1: + # can't close a branch that never existed + continue + ha = branchedits[0][1] + closebranches[branch] = ha + + # 1. 
handle normal commits + closedrevs = closebranches.values() + for branch, files in branch_batches.iteritems(): + + if branch in current.emptybranches and files: + del current.emptybranches[branch] + + files = dict(files) + parents = meta.get_parent_revision(rev.revnum, branch), revlog.nullid + if parents[0] in closedrevs and branch in meta.closebranches: + continue + + extra = meta.genextra(rev.revnum, branch) + tag = False + if branch is not None: + tag = meta.is_path_tag(meta.remotename(branch)) + if (not (tag and tag in meta.tags) and + (branch not in meta.branches + and branch not in meta.repo.branchtags())): + continue + + parentctx = meta.repo.changectx(parents[0]) + if tag: + if parentctx.node() == node.nullid: + continue + extra.update({'branch': parentctx.extra().get('branch', None), + 'close': 1}) + + if '.hgsvnexternals' not in parentctx and '.hgsvnexternals' in files: + # Do not register empty externals files + if (files['.hgsvnexternals'] in current.files + and not current.files[files['.hgsvnexternals']]): + del files['.hgsvnexternals'] + + def filectxfn(repo, memctx, path): + current_file = files[path] + if current_file in current.deleted: + raise IOError() + copied = current.copies.get(current_file) + flags = parentctx.flags(path) + is_exec = current.execfiles.get(current_file, 'x' in flags) + is_link = current.symlinks.get(current_file, 'l' in flags) + if current_file in current.files: + data = current.files[current_file] + if is_link and data.startswith('link '): + data = data[len('link '):] + elif is_link: + ui.warn('file marked as link, but contains data: ' + '%s (%r)\n' % (current_file, flags)) + else: + data = parentctx.filectx(path).data() + return context.memfilectx(path=path, + data=data, + islink=is_link, isexec=is_exec, + copied=copied) + + if not meta.usebranchnames or extra.get('branch', None) == 'default': + extra.pop('branch', None) + current_ctx = context.memctx(meta.repo, + parents, + rev.message or '...', + files.keys(), + filectxfn, + meta.authors[rev.author], + date, + extra) + + new_hash = meta.repo.commitctx(current_ctx) + util.describe_commit(ui, new_hash, branch) + if (rev.revnum, branch) not in meta.revmap and not tag: + meta.revmap[rev.revnum, branch] = new_hash + if tag: + meta.movetag(tag, new_hash, parentctx.extra().get('branch', None), rev, date) + + # 2. handle branches that need to be committed without any files + for branch in current.emptybranches: + + ha = meta.get_parent_revision(rev.revnum, branch) + if ha == node.nullid: + continue + + parent_ctx = meta.repo.changectx(ha) + def del_all_files(*args): + raise IOError + + # True here meant nuke all files, shouldn't happen with branch closing + if current.emptybranches[branch]: #pragma: no cover + raise hgutil.Abort('Empty commit to an open branch attempted. 
' + 'Please report this issue.') + + extra = meta.genextra(rev.revnum, branch) + + if not meta.usebranchnames: + extra.pop('branch', None) + + current_ctx = context.memctx(meta.repo, + (ha, node.nullid), + rev.message or ' ', + [], + del_all_files, + meta.authors[rev.author], + date, + extra) + new_hash = meta.repo.commitctx(current_ctx) + util.describe_commit(ui, new_hash, branch) + if (rev.revnum, branch) not in meta.revmap: + meta.revmap[rev.revnum, branch] = new_hash + + return closebranches diff --git a/hgsubversion/stupid.py b/hgsubversion/stupid.py --- a/hgsubversion/stupid.py +++ b/hgsubversion/stupid.py @@ -55,8 +55,8 @@ def mempatchproxy(parentctx, files): patchfile = patch.patchfile class mempatch(patchfile): - def __init__(self, ui, fname, opener, missing=False): - patchfile.__init__(self, ui, fname, None, False) + def __init__(self, ui, fname, opener, missing=False, eol=None): + patchfile.__init__(self, ui, fname, None, False, eol) def readlines(self, fname): if fname not in parentctx: @@ -76,13 +76,15 @@ def mempatchproxy(parentctx, files): return mempatch -def filteriterhunks(hg_editor): +def filteriterhunks(meta): iterhunks = patch.iterhunks - def filterhunks(ui, fp, sourcefile=None): + def filterhunks(ui, fp, sourcefile=None, textmode=False): applycurrent = False - for data in iterhunks(ui, fp, sourcefile): + # Passing False instead of textmode because we should never + # be ignoring EOL type. + for data in iterhunks(ui, fp, sourcefile, False): if data[0] == 'file': - if hg_editor._is_file_included(data[1][1]): + if data[1][1] in meta.filemap: applycurrent = True else: applycurrent = False @@ -92,7 +94,7 @@ def filteriterhunks(hg_editor): return filterhunks -def diff_branchrev(ui, svn, hg_editor, branch, r, parentctx): +def diff_branchrev(ui, svn, meta, branch, r, parentctx): """Extract all 'branch' content at a given revision. Return a tuple (files, filectxfn) where 'files' is the list of all files @@ -106,7 +108,7 @@ def diff_branchrev(ui, svn, hg_editor, b elif branch.startswith('../'): return branch[3:] return 'branches/%s' % branch - parent_rev, br_p = hg_editor.get_parent_svn_branch_and_rev(r.revnum, branch) + parent_rev, br_p = meta.get_parent_svn_branch_and_rev(r.revnum, branch) diff_path = make_diff_path(branch) try: if br_p == branch: @@ -149,7 +151,7 @@ def diff_branchrev(ui, svn, hg_editor, b oldpatchfile = patch.patchfile olditerhunks = patch.iterhunks patch.patchfile = mempatchproxy(parentctx, files_data) - patch.iterhunks = filteriterhunks(hg_editor) + patch.iterhunks = filteriterhunks(meta) try: # We can safely ignore the changed list since we are # handling non-git patches. Touched files are known @@ -211,7 +213,7 @@ def diff_branchrev(ui, svn, hg_editor, b for f in files_data: touched_files[f] = 1 - copies = getcopies(svn, hg_editor, branch, diff_path, r, touched_files, + copies = getcopies(svn, meta, branch, diff_path, r, touched_files, parentctx) def filectxfn(repo, memctx, path): @@ -282,7 +284,7 @@ def makecopyfinder(r, branchpath, rootdi return finder -def getcopies(svn, hg_editor, branch, branchpath, r, files, parentctx): +def getcopies(svn, meta, branch, branchpath, r, files, parentctx): """Return a mapping {dest: source} for every file copied into r. 
""" if parentctx.node() == revlog.nullid: @@ -307,10 +309,10 @@ def getcopies(svn, hg_editor, branch, br def getctx(svnrev): if svnrev in ctxs: return ctxs[svnrev] - changeid = hg_editor.get_parent_revision(svnrev + 1, branch) + changeid = meta.get_parent_revision(svnrev + 1, branch) ctx = None if changeid != revlog.nullid: - ctx = hg_editor.repo.changectx(changeid) + ctx = meta.repo.changectx(changeid) ctxs[svnrev] = ctx return ctx @@ -321,7 +323,7 @@ def getcopies(svn, hg_editor, branch, br if sourcectx is None: continue sources = [s[1] for s in copies] - if not hg_editor.aresamefiles(sourcectx, parentctx, sources): + if not util.aresamefiles(sourcectx, parentctx, sources): continue hgcopies.update(copies) return hgcopies @@ -378,7 +380,7 @@ def fetch_externals(svn, branchpath, r, return externals -def fetch_branchrev(svn, hg_editor, branch, branchpath, r, parentctx): +def fetch_branchrev(svn, meta, branch, branchpath, r, parentctx): """Extract all 'branch' content at a given revision. Return a tuple (files, filectxfn) where 'files' is the list of all files @@ -396,7 +398,7 @@ def fetch_branchrev(svn, hg_editor, bran for path, e in r.paths.iteritems(): if not path.startswith(branchprefix): continue - if not hg_editor._is_path_valid(path): + if not meta.is_path_valid(path): continue kind = svn.checkpath(path, r.revnum) path = path[len(branchprefix):] @@ -418,7 +420,7 @@ def fetch_branchrev(svn, hg_editor, bran deleted = [f for f in parentctx if f.startswith(path)] files += deleted - copies = getcopies(svn, hg_editor, branch, branchpath, r, files, parentctx) + copies = getcopies(svn, meta, branch, branchpath, r, files, parentctx) def filectxfn(repo, memctx, path): data, mode = svn.get_file(branchpath + '/' + path, r.revnum) @@ -430,32 +432,32 @@ def fetch_branchrev(svn, hg_editor, bran return files, filectxfn -def checkbranch(hg_editor, r, branch): - branchedits = hg_editor.branchedits(branch, r) +def checkbranch(meta, r, branch): + branchedits = meta.revmap.branchedits(branch, r) if not branchedits: return None branchtip = branchedits[0][1] - for child in hg_editor.repo[branchtip].children(): + for child in meta.repo[branchtip].children(): b = child.branch() != 'default' and child.branch() or None if b == branch and child.extra().get('close'): return None return branchtip -def branches_in_paths(hge, paths, revnum, checkpath, listdir): +def branches_in_paths(meta, tbdelta, paths, revnum, checkpath, listdir): '''Given a list of paths, return mapping of all branches touched to their branch path. 
''' branches = {} paths_need_discovery = [] for p in paths: - relpath, branch, branchpath = hge._split_branch_path(p) + relpath, branch, branchpath = meta.split_branch_path(p) if relpath is not None: branches[branch] = branchpath - elif paths[p].action == 'D' and not hge._is_path_tag(p): - ln = hge._localname(p) + elif paths[p].action == 'D' and not meta.is_path_tag(p): + ln = meta.localname(p) # must check in branches_to_delete as well, because this runs after we # already updated the branch map - if ln in hge.branches or ln in hge.branches_to_delete: + if ln in meta.branches or ln in tbdelta['branches'][1]: branches[ln] = p else: paths_need_discovery.append(p) @@ -497,12 +499,12 @@ def branches_in_paths(hge, paths, revnum path = filepaths.pop(0) parentdir = '/'.join(path[:-1]) filepaths = [p for p in filepaths if not '/'.join(p).startswith(parentdir)] - branchpath = hge._normalize_path(parentdir) + branchpath = meta.normalize(parentdir) if branchpath.startswith('tags/'): continue - branchname = hge._localname(branchpath) + branchname = meta.localname(branchpath) if branchpath.startswith('trunk/'): - branches[hge._localname('trunk')] = 'trunk' + branches[meta.localname('trunk')] = 'trunk' continue if branchname and branchname.startswith('../'): continue @@ -510,11 +512,11 @@ def branches_in_paths(hge, paths, revnum return branches -def convert_rev(ui, hg_editor, svn, r, tbdelta): +def convert_rev(ui, meta, svn, r, tbdelta): # this server fails at replay - hg_editor.save_tbdelta(tbdelta) - branches = branches_in_paths(hg_editor, r.paths, r.revnum, svn.checkpath, svn.list_files) + branches = branches_in_paths(meta, tbdelta, r.paths, r.revnum, + svn.checkpath, svn.list_files) brpaths = branches.values() bad_branch_paths = {} for br, bp in branches.iteritems(): @@ -528,103 +530,115 @@ def convert_rev(ui, hg_editor, svn, r, t # We've go a branch that contains other branches. We have to be careful to # get results similar to real replay in this case. - for existingbr in hg_editor.branches: - bad = hg_editor._remotename(existingbr) + for existingbr in meta.branches: + bad = meta.remotename(existingbr) if bad.startswith(bp) and len(bad) > len(bp): bad_branch_paths[br].append(bad[len(bp)+1:]) deleted_branches = {} for p in r.paths: - if hg_editor._is_path_tag(p): + if meta.is_path_tag(p): continue - branch = hg_editor._localname(p) - if not (r.paths[p].action == 'R' and branch in hg_editor.branches): + branch = meta.localname(p) + if not (r.paths[p].action == 'R' and branch in meta.branches): continue - closed = checkbranch(hg_editor, r, branch) + closed = checkbranch(meta, r, branch) if closed is not None: deleted_branches[branch] = closed - date = hg_editor.fixdate(r.date) - check_deleted_branches = set() + date = meta.fixdate(r.date) + check_deleted_branches = set(tbdelta['branches'][1]) for b in branches: - parentctx = hg_editor.repo[hg_editor.get_parent_revision(r.revnum, b)] + + parentctx = meta.repo[meta.get_parent_revision(r.revnum, b)] if parentctx.branch() != (b or 'default'): check_deleted_branches.add(b) + kind = svn.checkpath(branches[b], r.revnum) if kind != 'd': # Branch does not exist at this revision. Get parent revision and # remove everything. deleted_branches[b] = parentctx.node() continue - else: - try: - files_touched, filectxfn2 = diff_branchrev( - ui, svn, hg_editor, b, r, parentctx) - except BadPatchApply, e: - # Either this revision or the previous one does not exist. 
- ui.status("Fetching entire revision: %s.\n" % e.args[0]) - files_touched, filectxfn2 = fetch_branchrev( - svn, hg_editor, b, branches[b], r, parentctx) - - externals = fetch_externals(svn, branches[b], r, parentctx) - if externals is not None: - files_touched.append('.hgsvnexternals') - - def filectxfn(repo, memctx, path): - if path == '.hgsvnexternals': - if not externals: - raise IOError() - return context.memfilectx(path=path, data=externals.write(), - islink=False, isexec=False, copied=None) - for bad in bad_branch_paths[b]: - if path.startswith(bad): - raise IOError() - return filectxfn2(repo, memctx, path) - - extra = util.build_extra(r.revnum, b, svn.uuid, svn.subdir) + + try: + files_touched, filectxfn2 = diff_branchrev( + ui, svn, meta, b, r, parentctx) + except BadPatchApply, e: + # Either this revision or the previous one does not exist. + ui.status("Fetching entire revision: %s.\n" % e.args[0]) + files_touched, filectxfn2 = fetch_branchrev( + svn, meta, b, branches[b], r, parentctx) + + externals = fetch_externals(svn, branches[b], r, parentctx) + if externals is not None: + files_touched.append('.hgsvnexternals') + + def filectxfn(repo, memctx, path): + if path == '.hgsvnexternals': + if not externals: + raise IOError() + return context.memfilectx(path=path, data=externals.write(), + islink=False, isexec=False, copied=None) + for bad in bad_branch_paths[b]: + if path.startswith(bad): + raise IOError() + return filectxfn2(repo, memctx, path) + if '' in files_touched: files_touched.remove('') - excluded = [f for f in files_touched - if not hg_editor._is_file_included(f)] + excluded = [f for f in files_touched if f not in meta.filemap] for f in excluded: files_touched.remove(f) - if parentctx.node() != node.nullid or files_touched: - for f in files_touched: - if f: - # this is a case that really shouldn't ever happen, it means something - # is very wrong - assert f[0] != '/' - current_ctx = context.memctx(hg_editor.repo, - [parentctx.node(), revlog.nullid], - r.message or util.default_commit_msg, - files_touched, - filectxfn, - hg_editor.authors[r.author], - date, - extra) - branch = extra.get('branch', None) - if not hg_editor.usebranchnames: - extra.pop('branch', None) - ha = hg_editor.repo.commitctx(current_ctx) - if not branch in hg_editor.branches: - hg_editor.branches[branch] = None, 0, r.revnum - hg_editor.add_to_revmap(r.revnum, b, ha) - util.describe_commit(ui, ha, b) + + if parentctx.node() == node.nullid and not files_touched: + continue + + for f in files_touched: + if f: + # this is a case that really shouldn't ever happen, it means + # something is very wrong + assert f[0] != '/' + + extra = meta.genextra(r.revnum, b) + + tag = False + tag = meta.is_path_tag(meta.remotename(b)) + + if tag: + if parentctx.node() == node.nullid: + continue + extra.update({'branch': parentctx.extra().get('branch', None), + 'close': 1}) + + if not meta.usebranchnames or extra.get('branch', None) == 'default': + extra.pop('branch', None) + current_ctx = context.memctx(meta.repo, + [parentctx.node(), revlog.nullid], + r.message or util.default_commit_msg, + files_touched, + filectxfn, + meta.authors[r.author], + date, + extra) + ha = meta.repo.commitctx(current_ctx) + + branch = extra.get('branch', None) + if not tag: + if (not branch in meta.branches + and not meta.is_path_tag(meta.remotename(branch))): + print tag, 'madebranch', branch + meta.branches[branch] = None, 0, r.revnum + meta.revmap[r.revnum, b] = ha + else: + meta.movetag(tag, ha, parentctx.extra().get('branch', None), r, 
date) + util.describe_commit(ui, ha, b) # These are branches with an 'R' status in svn log. This means they were # replaced by some other branch, so we need to verify they get marked as closed. for branch in check_deleted_branches: - closed = checkbranch(hg_editor, r, branch) + closed = checkbranch(meta, r, branch) if closed is not None: deleted_branches[branch] = closed - if tbdelta['tags'][0] or tbdelta['tags'][1]: - hg_editor.committags(tbdelta['tags'], r, deleted_branches) - - for b, parent in deleted_branches.iteritems(): - if parent == node.nullid: - continue - hg_editor.delbranch(b, parent, r) - - # save the changed metadata - hg_editor._save_metadata() + return deleted_branches diff --git a/hgsubversion/svncommands.py b/hgsubversion/svncommands.py --- a/hgsubversion/svncommands.py +++ b/hgsubversion/svncommands.py @@ -5,67 +5,72 @@ from mercurial import hg from mercurial import node from mercurial import util as hgutil - -import hg_delta_editor +import maps import svnwrap +import svnrepo import util import utility_commands import svnexternals -def incoming(ui, svn_url, hg_repo_path, skipto_rev=0, stupid=None, - tag_locations='tags', authors=None, filemap=None, **opts): - """show incoming revisions from Subversion - """ - svn_url = util.normalize_url(svn_url) - - initializing_repo = False - user, passwd = util.getuserpass(opts) - svn = svnwrap.SubversionRepo(svn_url, user, passwd) - author_host = ui.config('hgsubversion', 'defaulthost', svn.uuid) - tag_locations = tag_locations.split(',') - hg_editor = hg_delta_editor.HgChangeReceiver(hg_repo_path, - ui_=ui, - subdir=svn.subdir, - author_host=author_host, - tag_locations=tag_locations, - authors=authors, - filemap=filemap, - uuid=svn.uuid) - start = max(hg_editor.last_known_revision(), skipto_rev) - initializing_repo = (hg_editor.last_known_revision() <= 0) - - if initializing_repo and start > 0: - raise hgutil.Abort('Revision skipping at repository initialization ' - 'remains unimplemented.') - - rev_stuff = (('revision', 'revnum'), - ('user', 'author'), - ('date', 'date'), - ('message', 'message') - ) - - ui.status('incoming changes from %s\n' % svn_url) - - for r in svn.revisions(start=start): - ui.status('\n') - for label, attr in rev_stuff: - l1 = label+':' - ui.status('%s%s\n' % (l1.ljust(13), - str(r.__getattribute__(attr)).strip(), )) +def verify(ui, repo, *args, **opts): + '''verify current revision against Subversion repository + ''' + ctx = repo[opts.get('rev', '.')] + if 'close' in ctx.extra(): + ui.write('cannot verify closed branch') + return 0 + srev = ctx.extra().get('convert_revision') + if srev is None: + raise hgutil.Abort('revision %s not from SVN' % ctx) + + srev = int(srev.split('@')[1]) + ui.write('verifying %s against r%i\n' % (ctx, srev)) + + + url = repo.ui.expandpath('default') + if args: + url = args[0] + svn = svnrepo.svnremoterepo(ui, url).svn + + btypes = {'default': 'trunk'} + branchpath = btypes.get(ctx.branch(), 'branches/%s' % ctx.branch()) + svnfiles = set() + result = 0 + for fn, type in svn.list_files(branchpath, srev): + if type != 'f': + continue + svnfiles.add(fn) + data, mode = svn.get_file(branchpath + '/' + fn, srev) + fctx = ctx[fn] + dmatch = fctx.data() == data + mmatch = fctx.flags() == mode + if not (dmatch and mmatch): + ui.write('difference in file %s' % fn) + result = 1 + + hgfiles = set(ctx) + hgfiles.discard('.hgtags') + hgfiles.discard('.hgsvnexternals') + if hgfiles != svnfiles: + missing = set(hgfiles).symmetric_difference(svnfiles) + ui.write('missing files: %s' % (', 
'.join(missing))) + result = 1 + + return result def rebuildmeta(ui, repo, hg_repo_path, args, **opts): """rebuild hgsubversion metadata using values stored in revisions """ - if len(args) != 1: - url = repo.ui.expandpath(dest or 'default-push', dest or 'default') - else: - url = args[0] + dest = None + if len(args) == 1: + dest = args[0] + elif len(args) > 1: + raise hgutil.Abort('rebuildmeta takes 1 or no arguments') uuid = None - url = util.normalize_url(url.rstrip('/')) - user, passwd = util.getuserpass(opts) - svn = svnwrap.SubversionRepo(url, user, passwd) + url = repo.ui.expandpath(dest or 'default-push', dest or 'default') + svn = svnrepo.svnremoterepo(ui, url).svn subdir = svn.subdir svnmetadir = os.path.join(repo.path, 'svn') if not os.path.exists(svnmetadir): @@ -76,12 +81,39 @@ def rebuildmeta(ui, repo, hg_repo_path, last_rev = -1 branchinfo = {} noderevnums = {} + tagfile = os.path.join(svnmetadir, 'tagmap') + if os.path.exists(maps.TagMap.filepath(repo)): + os.unlink(maps.TagMap.filepath(repo)) + tags = maps.TagMap(repo) + + skipped = set() + for rev in repo: ctx = repo[rev] convinfo = ctx.extra().get('convert_revision', None) if not convinfo: continue + if '.hgtags' in ctx.files(): + parent = ctx.parents()[0] + parentdata = '' + if '.hgtags' in parent: + parentdata = parent.filectx('.hgtags').data() + newdata = ctx.filectx('.hgtags').data() + for newtag in newdata[len(parentdata):-1].split('\n'): + ha, tag = newtag.split(' ', 1) + tagged = repo[ha].extra().get('convert_revision', None) + if tagged is None: + tagged = -1 + else: + tagged = int(tagged[40:].split('@')[1]) + # This is max(tagged rev, tagging rev) because if it is a normal + # tag, the tagging revision has the right rev number. However, if it + # was an edited tag, then the tagged revision has the correct revision + # number. + tagging = int(convinfo[40:].split('@')[1]) + tagrev = max(tagged, tagging) + tags[tag] = node.bin(ha), tagrev # check that the conversion metadata matches expectations assert convinfo.startswith('svn:') @@ -101,12 +133,22 @@ def rebuildmeta(ui, repo, hg_repo_path, uuidfile.write(uuid) uuidfile.close() + # don't reflect closed branches + if (ctx.extra().get('close') and not ctx.files() or + ctx.parents()[0].node() in skipped): + skipped.add(ctx.node()) + continue + # find commitpath, write to revmap commitpath = revpath[len(subdir)+1:] if commitpath.startswith('branches'): commitpath = commitpath[len('branches/'):] elif commitpath == 'trunk': commitpath = '' + elif commitpath.startswith('tags'): + if ctx.extra().get('close'): + continue + commitpath = '../' + commitpath else: assert False, 'Unhandled case in rebuildmeta' revmap.write('%s %s %s\n' % (revision, ctx.hex(), commitpath)) @@ -117,7 +159,7 @@ def rebuildmeta(ui, repo, hg_repo_path, last_rev = revision # deal with branches - if ctx.extra().get('close'): # don't re-add, we just deleted! + if ctx.extra().get('close'): continue branch = ctx.branch() if branch == 'default': @@ -134,9 +176,14 @@ def rebuildmeta(ui, repo, hg_repo_path, branchinfo[branch] = (parentbranch, noderevnums.get(parent.node(), 0), revision) - + droprev = lambda x: x.rsplit('@', 1)[0] for cctx in ctx.children(): - if cctx.extra().get('close'): + # check if a child of this change closes this branch + # that's true if the close flag is set and the svn revision + # path is the same. 
droprev removes the revnumber so we + # can verify it is the same branch easily + if (cctx.extra().get('close') + and droprev(cctx.extra().get('convert_revision', '@')) == droprev(convinfo)): branchinfo.pop(branch, None) break @@ -173,24 +220,27 @@ def help(ui, args=None, **opts): def update(ui, args, repo, clean=False, **opts): """update to a specified Subversion revision number """ + assert len(args) == 1 rev = int(args[0]) - path = os.path.join(repo.path, 'svn', 'rev_map') + meta = repo.svnmeta() + answers = [] - for k,v in util.parse_revmap(path).iteritems(): + for k, v in meta.revmap.iteritems(): if k[0] == rev: answers.append((v, k[1])) + if len(answers) == 1: if clean: return hg.clean(repo, answers[0][0]) return hg.update(repo, answers[0][0]) elif len(answers) == 0: - ui.status('Revision %s did not produce an hg revision.\n' % rev) + ui.status('revision %s did not produce an hg revision\n' % rev) return 1 else: - ui.status('Ambiguous revision!\n') - ui.status('\n'.join(['%s on %s' % (node.hex(a[0]), a[1]) for a in - answers]+[''])) + ui.status('ambiguous revision!\n') + revs = ['%s on %s' % (node.hex(a[0]), a[1]) for a in answers] + [''] + ui.status('\n'.join(revs)) return 1 @@ -198,8 +248,8 @@ table = { 'update': update, 'help': help, 'rebuildmeta': rebuildmeta, - 'incoming': incoming, 'updateexternals': svnexternals.updateexternals, + 'verify': verify, } table.update(utility_commands.table) diff --git a/hgsubversion/svnexternals.py b/hgsubversion/svnexternals.py --- a/hgsubversion/svnexternals.py +++ b/hgsubversion/svnexternals.py @@ -242,9 +242,13 @@ class externalsupdater: args = ['svn'] + args self.ui.debug(_('updating externals: %r, cwd=%s\n') % (args, cwd)) shell = os.name == 'nt' - r = subprocess.call(args, cwd=cwd, shell=shell, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - if r != 0: raise hgutil.Abort("subprocess '%s' failed" % ' '.join(args)) + p = subprocess.Popen(args, cwd=cwd, shell=shell, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + for line in p.stdout: + self.ui.note(line) + p.wait() + if p.returncode != 0: + raise hgutil.Abort("subprocess '%s' failed" % ' '.join(args)) def updateexternals(ui, args, repo, **opts): """update repository externals diff --git a/hgsubversion/hg_delta_editor.py b/hgsubversion/svnmeta.py copy from hgsubversion/hg_delta_editor.py copy to hgsubversion/svnmeta.py --- a/hgsubversion/hg_delta_editor.py +++ b/hgsubversion/svnmeta.py @@ -1,33 +1,16 @@ -import cStringIO import cPickle as pickle import os -import sys import tempfile -import traceback from mercurial import context -from mercurial import hg -from mercurial import ui from mercurial import util as hgutil from mercurial import revlog from mercurial import node -from mercurial import error -from svn import delta -from svn import core -import svnexternals import util import maps +import editor -class MissingPlainTextError(Exception): - """Exception raised when the repo lacks a source file required for replaying - a txdelta. - """ - -class ReplayException(Exception): - """Exception raised when you try and commit but the replay encountered an - exception. - """ def pickle_atomic(data, file_path, dir=None): """pickle some data to a path atomically. @@ -45,70 +28,28 @@ def pickle_atomic(data, file_path, dir=N else: hgutil.rename(path, file_path) -def ieditor(fn): - """Helps identify methods used by the SVN editor interface. - - Stash any exception raised in the method on self. 
- - This is required because the SWIG bindings just mutate any exception into - a generic Subversion exception with no way of telling what the original was. - This allows the editor object to notice when you try and commit and really - got an exception in the replay process. - """ - def fun(self, *args, **kwargs): - try: - return fn(self, *args, **kwargs) - except: #pragma: no cover - if not hasattr(self, '_exception_info'): - self._exception_info = sys.exc_info() - raise - return fun - - -class HgChangeReceiver(delta.Editor): - def add_to_revmap(self, revnum, branch, node_hash): - f = open(self.revmap_file, 'a') - f.write(str(revnum) + ' ' + node.hex(node_hash) + ' ' + (branch or '') + '\n') - f.flush() - f.close() - self.revmap[revnum, branch] = node_hash - - def last_known_revision(self): - """Obtain the highest numbered -- i.e. latest -- revision known. - Currently, this function just iterates over the entire revision map - using the max() builtin. This may be slow for extremely large - repositories, but for now, it's fast enough. - """ - try: - return max(k[0] for k in self.revmap.iterkeys()) - except ValueError: - return 0 +class SVNMeta(object): - def __init__(self, repo=None, path=None, ui_=None, - subdir='', author_host='', - tag_locations=[], - authors=None, filemap=None, uuid=None): + def __init__(self, repo, uuid=None, subdir=''): """path is the path to the target hg repo. subdir is the subdirectory of the edits *on the svn server*. It is needed for stripping paths off in certain cases. """ - if repo and repo.ui and not ui_: - ui_ = repo.ui - if not ui_: - ui_ = ui.ui() - self.ui = ui_ - self.__setup_repo(uuid, repo, path, subdir) + self.ui = repo.ui + self.repo = repo + self.path = os.path.normpath(repo.join('..')) + + if not os.path.isdir(self.meta_data_dir): + os.makedirs(self.meta_data_dir) + self._set_uuid(uuid) + # TODO: validate subdir too + self.revmap = maps.RevMap(repo) - if not author_host: - author_host = self.ui.config('hgsubversion', 'defaulthost', uuid) - if not authors: - authors = self.ui.config('hgsubversion', 'authormap') - if not filemap: - filemap = self.ui.config('hgsubversion', 'filemap') - if not tag_locations: - tag_locations = self.ui.configlist('hgsubversion', 'tagpaths', ['tags']) + author_host = self.ui.config('hgsubversion', 'defaulthost', uuid) + authors = self.ui.config('hgsubversion', 'authormap') + tag_locations = self.ui.configlist('hgsubversion', 'tagpaths', ['tags']) self.usebranchnames = self.ui.configbool('hgsubversion', 'usebranchnames', True) @@ -121,7 +62,7 @@ class HgChangeReceiver(delta.Editor): f = open(self.branch_info_file) self.branches = pickle.load(f) f.close() - self.tags = {} + self.tags = maps.TagMap(repo) if os.path.exists(self.tag_locations_file): f = open(self.tag_locations_file) self.tag_locations = pickle.load(f) @@ -134,16 +75,57 @@ class HgChangeReceiver(delta.Editor): self.tag_locations.sort() self.tag_locations.reverse() - self.clear_current_info() self.authors = maps.AuthorMap(self.ui, self.authors_file, defaulthost=author_host) if authors: self.authors.load(authors) self.lastdate = '1970-01-01 00:00:00 -0000' - self.includepaths = {} - self.excludepaths = {} - if filemap and os.path.exists(filemap): - self.readfilemap(filemap) + self.filemap = maps.FileMap(repo) + + @property + def editor(self): + if not hasattr(self, '_editor'): + self._editor = editor.HgEditor(self) + return self._editor + + def _get_uuid(self): + return open(os.path.join(self.meta_data_dir, 'uuid')).read() + + def _set_uuid(self, uuid): + if not 
uuid: + return + elif os.path.isfile(os.path.join(self.meta_data_dir, 'uuid')): + stored_uuid = self._get_uuid() + assert stored_uuid + if uuid != stored_uuid: + raise hgutil.Abort('unable to operate on unrelated repository') + else: + if uuid: + f = open(os.path.join(self.meta_data_dir, 'uuid'), 'w') + f.write(uuid) + f.flush() + f.close() + else: + raise hgutil.Abort('unable to operate on unrelated repository') + + uuid = property(_get_uuid, _set_uuid, None, + 'Error-checked UUID of source Subversion repository.') + + @property + def meta_data_dir(self): + return os.path.join(self.path, '.hg', 'svn') + + @property + def branch_info_file(self): + return os.path.join(self.meta_data_dir, 'branch_info') + + @property + def tag_locations_file(self): + return os.path.join(self.meta_data_dir, 'tag_locations') + + @property + def authors_file(self): + return os.path.join(self.meta_data_dir, 'authors') def fixdate(self, date): if date is not None: @@ -154,86 +136,80 @@ class HgChangeReceiver(delta.Editor): date = self.lastdate return date - def __setup_repo(self, uuid, repo, path, subdir): - """Verify the repo is going to work out for us. - - This method will fail an assertion if the repo exists but doesn't have - the Subversion metadata. - """ - if repo: - self.repo = repo - self.path = os.path.normpath(self.repo.join('..')) - elif path: - self.repo = hg.repository(self.ui, path, - create=(not os.path.exists(path))) - self.path = os.path.normpath(os.path.join(path, '..')) - else: #pragma: no cover - raise TypeError("editor requires either a path or a repository " - "specified") - - if not os.path.isdir(self.meta_data_dir): - os.makedirs(self.meta_data_dir) - self._set_uuid(uuid) - # TODO: validate subdir too - - if os.path.isfile(self.revmap_file): - self.revmap = util.parse_revmap(self.revmap_file) - else: - self.revmap = {} - f = open(self.revmap_file, 'w') - f.write('%s\n' % util.REVMAP_FILE_VERSION) - f.flush() - f.close() - - def clear_current_info(self): - '''Clear the info relevant to a replayed revision so that the next - revision can be replayed. - ''' - # Map files to raw svn data (symlink prefix is preserved) - self.current_files = {} - self.deleted_files = {} - self.current_rev = None - self.current_files_exec = {} - self.current_files_symlink = {} - self.dir_batons = {} - # Map fully qualified destination file paths to module source path - self.copies = {} - self.missing_plaintexts = set() - self.commit_branches_empty = {} - self.base_revision = None - self.branches_to_delete = set() - self.externals = {} - - def _save_metadata(self): + def save(self): '''Save the Subversion metadata. This should really be called after every revision is created. ''' pickle_atomic(self.branches, self.branch_info_file, self.meta_data_dir) - def _path_and_branch_for_path(self, path, existing=True): - return self._split_branch_path(path, existing=existing)[:2] - - def _branch_for_path(self, path, existing=True): - return self._path_and_branch_for_path(path, existing=existing)[1] - - def _localname(self, path): + def localname(self, path): """Compute the local name for a branch located at path. 
""" - assert not path.startswith('tags/') if path == 'trunk': return None elif path.startswith('branches/'): return path[len('branches/'):] return '../%s' % path - def _remotename(self, branch): + def remotename(self, branch): if branch == 'default' or branch is None: return 'trunk' elif branch.startswith('../'): return branch[3:] return 'branches/%s' % branch - def _split_branch_path(self, path, existing=True): + def genextra(self, revnum, branch): + extra = {} + branchpath = 'trunk' + if branch: + extra['branch'] = branch + if branch.startswith('../'): + branchpath = branch[3:] + else: + branchpath = 'branches/%s' % branch + + subdir = self.subdir + if subdir and subdir[-1] == '/': + subdir = subdir[:-1] + if subdir and subdir[0] != '/': + subdir = '/' + subdir + + extra['convert_revision'] = 'svn:%(uuid)s%(path)s@%(rev)s' % { + 'uuid': self.uuid, + 'path': '%s/%s' % (subdir , branchpath), + 'rev': revnum, + } + return extra + + def normalize(self, path): + '''Normalize a path to strip of leading slashes and our subdir if we + have one. + ''' + if path and path[0] == '/': + path = path[1:] + if path and path.startswith(self.subdir): + path = path[len(self.subdir):] + if path and path[0] == '/': + path = path[1:] + return path + + def is_path_tag(self, path): + """If path could represent the path to a tag, returns the potential tag + name. Otherwise, returns False. + + Note that it's only a tag if it was copied from the path '' in a branch + (or tag) we have, for our purposes. + """ + path = self.normalize(path) + for tagspath in self.tag_locations: + onpath = path.startswith(tagspath) + longer = len(path) > len('%s/' % tagspath) + if path and onpath and longer: + tag, subpath = path[len(tagspath) + 1:], '' + return tag + return False + + def split_branch_path(self, path, existing=True): """Figure out which branch inside our repo this path represents, and also figure out which path inside that branch it is. @@ -243,18 +219,26 @@ class HgChangeReceiver(delta.Editor): branch. If existing=False, then it will guess what the branch would be if it were known. """ - path = self._normalize_path(path) - if path.startswith('tags/'): - return None, None, None + path = self.normalize(path) + if self.is_path_tag(path): + tag = self.is_path_tag(path) + matched = [t for t in self.tags.iterkeys() if tag.startswith(t+'/')] + if not matched: + return None, None, None + matched.sort(cmp=lambda x,y: cmp(len(x),len(y)), reverse=True) + brpath = tag[len(matched[0])+1:] + svrpath = path[:-(len(brpath)+1)] + ln = self.localname(svrpath) + return brpath, ln, svrpath test = '' path_comps = path.split('/') - while self._localname(test) not in self.branches and len(path_comps): + while self.localname(test) not in self.branches and len(path_comps): if not test: test = path_comps.pop(0) else: test += '/%s' % path_comps.pop(0) - if self._localname(test) in self.branches: - return path[len(test)+1:], self._localname(test), test + if self.localname(test) in self.branches: + return path[len(test)+1:], self.localname(test), test if existing: return None, None, None if path == 'trunk' or path.startswith('trunk/'): @@ -267,98 +251,30 @@ class HgChangeReceiver(delta.Editor): else: path = test.split('/')[-1] test = '/'.join(test.split('/')[:-1]) - ln = self._localname(test) + ln = self.localname(test) if ln and ln.startswith('../'): return None, None, None return path, ln, test - def set_current_rev(self, rev): - """Set the revision we're currently converting. 
- """ - self.current_rev = rev - - def set_file(self, path, data, isexec=False, islink=False): - if islink: - data = 'link ' + data - self.current_files[path] = data - self.current_files_exec[path] = isexec - self.current_files_symlink[path] = islink - if path in self.deleted_files: - del self.deleted_files[path] - if path in self.missing_plaintexts: - self.missing_plaintexts.remove(path) - - def delete_file(self, path): - self.deleted_files[path] = True - if path in self.current_files: - del self.current_files[path] - self.current_files_exec[path] = False - self.current_files_symlink[path] = False - self.ui.note('D %s\n' % path) - - def _normalize_path(self, path): - '''Normalize a path to strip of leading slashes and our subdir if we - have one. - ''' - if path and path[0] == '/': - path = path[1:] - if path and path.startswith(self.subdir): - path = path[len(self.subdir):] - if path and path[0] == '/': - path = path[1:] - return path - - def _is_file_included(self, subpath): - def checkpathinmap(path, mapping): - def rpairs(name): - yield '.', name - e = len(name) - while e != -1: - yield name[:e], name[e+1:] - e = name.rfind('/', 0, e) - - for pre, suf in rpairs(path): - try: - return mapping[pre] - except KeyError, err: - pass - return None - - if len(self.includepaths) and len(subpath): - inc = checkpathinmap(subpath, self.includepaths) - else: - inc = subpath - if len(self.excludepaths) and len(subpath): - exc = checkpathinmap(subpath, self.excludepaths) - else: - exc = None - if inc is None or exc is not None: - return False - return True + def _determine_parent_branch(self, p, src_path, src_rev, revnum): + if src_path is not None: + src_file, src_branch = self.split_branch_path(src_path)[:2] + src_tag = self.is_path_tag(src_path) + if src_tag != False or src_file == '': # case 2 + ln = self.localname(p) + if src_tag != False and src_tag in self.tags: + ci = self.repo[self.tags[src_tag]].extra()['convert_revision'] + src_rev, src_branch, = self.parse_converted_revision(ci) + return {ln: (src_branch, src_rev, revnum)} + return {} - def _is_path_valid(self, path): + def is_path_valid(self, path): if path is None: return False - subpath = self._split_branch_path(path)[0] + subpath = self.split_branch_path(path)[0] if subpath is None: return False - return self._is_file_included(subpath) - - def _is_path_tag(self, path): - """If path could represent the path to a tag, returns the potential tag - name. Otherwise, returns False. - - Note that it's only a tag if it was copied from the path '' in a branch - (or tag) we have, for our purposes. - """ - path = self._normalize_path(path) - for tagspath in self.tag_locations: - onpath = path.startswith(tagspath) - longer = len(path) > len('%s/' % tagspath) - if path and onpath and longer: - tag, subpath = path[len(tagspath) + 1:], '' - return tag - return False + return subpath in self.filemap def get_parent_svn_branch_and_rev(self, number, branch): number -= 1 @@ -394,59 +310,48 @@ class HgChangeReceiver(delta.Editor): def get_parent_revision(self, number, branch): '''Get the parent revision hash for a commit on a specific branch. ''' + tag = self.is_path_tag(self.remotename(branch)) + limitedtags = maps.TagMap(self.repo, endrev=number-1) + if tag and tag in limitedtags: + ha = limitedtags[tag] + return ha r, br = self.get_parent_svn_branch_and_rev(number, branch) if r is not None: return self.revmap[r, br] return revlog.nullid - def _svnpath(self, branch): - """Return the relative path in svn of branch. 
- """ - if branch == None or branch == 'default': - return 'trunk' - elif branch.startswith('../'): - return branch[3:] - return 'branches/%s' % branch - - def _determine_parent_branch(self, p, src_path, src_rev, revnum): - if src_path is not None: - src_file, src_branch = self._path_and_branch_for_path(src_path) - src_tag = self._is_path_tag(src_path) - if src_tag != False: - # also case 2 - src_branch, src_rev = self.tags[src_tag] - return {self._localname(p): (src_branch, src_rev, revnum )} - if src_file == '': - # case 2 - return {self._localname(p): (src_branch, src_rev, revnum )} - return {} + def parse_converted_revision(self, convertedrev): + branch, revnum = convertedrev[40:].rsplit('@', 1) + return int(revnum), self.localname(self.normalize(branch)) def update_branch_tag_map_for_rev(self, revision): paths = revision.paths added_branches = {} added_tags = {} - self.branches_to_delete = set() + self.closebranches = set() tags_to_delete = set() for p in sorted(paths): - t_name = self._is_path_tag(p) + t_name = self.is_path_tag(p) if t_name != False: src_p, src_rev = paths[p].copyfrom_path, paths[p].copyfrom_rev # if you commit to a tag, I'm calling you stupid and ignoring # you. if src_p is not None and src_rev is not None: - file, branch = self._path_and_branch_for_path(src_p) + file, branch = self.split_branch_path(src_p)[:2] if file is None: # some crazy people make tags from other tags - file = '' - from_tag = self._is_path_tag(src_p) + from_tag = self.is_path_tag(src_p) if not from_tag: continue - branch, src_rev = self.tags[from_tag] + if from_tag in self.tags: + ci = self.repo[self.tags[from_tag]].extra()['convert_revision'] + src_rev, branch, = self.parse_converted_revision(ci) + file = '' if t_name not in added_tags and file is '': added_tags[t_name] = branch, src_rev elif file: t_name = t_name[:-(len(file)+1)] - if src_rev > added_tags[t_name][1]: + if t_name in added_tags and src_rev > added_tags[t_name][1]: added_tags[t_name] = branch, src_rev elif (paths[p].action == 'D' and p.endswith(t_name) and t_name in self.tags): @@ -474,11 +379,11 @@ class HgChangeReceiver(delta.Editor): # already-known branches, so we mark them as deleted. # 6. It's a branch being replaced by another branch - the # action will be 'R'. 
- fi, br = self._path_and_branch_for_path(p) + fi, br = self.split_branch_path(p)[:2] if fi is not None: if fi == '': if paths[p].action == 'D': - self.branches_to_delete.add(br) # case 4 + self.closebranches.add(br) # case 4 elif paths[p].action == 'R': parent = self._determine_parent_branch( p, paths[p].copyfrom_path, paths[p].copyfrom_rev, @@ -487,67 +392,75 @@ class HgChangeReceiver(delta.Editor): continue # case 1 if paths[p].action == 'D': for known in self.branches: - if self._svnpath(known).startswith(p): - self.branches_to_delete.add(known) # case 5 + if self.remotename(known).startswith(p): + self.closebranches.add(known) # case 5 parent = self._determine_parent_branch( p, paths[p].copyfrom_path, paths[p].copyfrom_rev, revision.revnum) if not parent and paths[p].copyfrom_path: - bpath, branch = self._path_and_branch_for_path(p, False) + bpath, branch = self.split_branch_path(p, False)[:2] if (bpath is not None and branch not in self.branches and branch not in added_branches): parent = {branch: (None, 0, revision.revnum)} added_branches.update(parent) - rmtags = dict((t, self.tags[t][0]) for t in tags_to_delete) + def branchoftag(tag): + cr = self.repo[self.tags[tag]].extra()['convert_revision'] + return self.parse_converted_revision(cr)[1] + rmtags = dict((t, branchoftag(t)) for t in tags_to_delete) return { 'tags': (added_tags, rmtags), - 'branches': (added_branches, self.branches_to_delete), + 'branches': (added_branches, self.closebranches), } def save_tbdelta(self, tbdelta): - for t in tbdelta['tags'][1]: - del self.tags[t] for br in tbdelta['branches'][1]: del self.branches[br] - for t, info in tbdelta['tags'][0].items(): - self.ui.status('Tagged %s@%s as %s\n' % - (info[0] or 'trunk', info[1], t)) - self.tags.update(tbdelta['tags'][0]) self.branches.update(tbdelta['branches'][0]) - def _updateexternals(self): - if not self.externals: + def movetag(self, tag, hash, branch, rev, date): + if self.tags[tag] == hash: return - # Accumulate externals records for all branches - revnum = self.current_rev.revnum - branches = {} - for path, entry in self.externals.iteritems(): - if not self._is_path_valid(path): - self.ui.warn('WARNING: Invalid path %s in externals\n' % path) - continue - p, b, bp = self._split_branch_path(path) - if bp not in branches: - external = svnexternals.externalsfile() - parent = self.get_parent_revision(revnum, b) - pctx = self.repo[parent] - if '.hgsvnexternals' in pctx: - external.read(pctx['.hgsvnexternals'].data()) - branches[bp] = external - else: - external = branches[bp] - external[p] = entry - - # Register the file changes - for bp, external in branches.iteritems(): - path = bp + '/.hgsvnexternals' - if external: - self.set_file(path, external.write(), False, False) - else: - self.delete_file(path) - - def branchedits(self, branch, rev): - check = lambda x: x[0][1] == branch and x[0][0] < rev.revnum - return sorted(filter(check, self.revmap.iteritems()), reverse=True) + if branch == 'default': + branch = None + parentctx = self.repo[self.get_parent_revision(rev.revnum+1, branch)] + if '.hgtags' in parentctx: + tagdata = parentctx.filectx('.hgtags').data() + else: + tagdata = '' + tagdata += '%s %s\n' % (node.hex(hash), tag, ) + def hgtagsfn(repo, memctx, path): + assert path == '.hgtags' + return context.memfilectx(path=path, + data=tagdata, + islink=False, + isexec=False, + copied=False) + pextra = parentctx.extra() + revnum, branch = self.parse_converted_revision(pextra['convert_revision']) + newparent = None + for child in parentctx.children(): + 
cextra = child.extra() + if (self.parse_converted_revision(cextra['convert_revision'])[1] == branch + and cextra.get('close', False)): + newparent = child + if newparent: + parentctx = newparent + pextra = parentctx.extra() + revnum, branch = self.parse_converted_revision(pextra['convert_revision']) + ctx = context.memctx(self.repo, + (parentctx.node(), node.nullid), + rev.message or '...', + ['.hgtags', ], + hgtagsfn, + self.authors[rev.author], + date, + pextra) + new_hash = self.repo.commitctx(ctx) + if not newparent: + assert self.revmap[revnum, branch] == parentctx.node() + self.revmap[revnum, branch] = new_hash + self.tags[tag] = hash, rev.revnum + util.describe_commit(self.ui, new_hash, branch) def committags(self, delta, rev, endbranches): @@ -561,27 +474,38 @@ class HgChangeReceiver(delta.Editor): branches.setdefault(branch, []).append(('rm', tag, None)) for b, tags in branches.iteritems(): - + fromtag = self.is_path_tag(self.remotename(b)) # modify parent's .hgtags source - parent = self.repo[{None: 'default'}.get(b, b)] + parent = self.repo[self.get_parent_revision(rev.revnum, b)] if '.hgtags' not in parent: src = '' else: src = parent['.hgtags'].data() for op, tag, r in sorted(tags, reverse=True): if op == 'add': - tagged = node.hex(self.revmap[ - self.get_parent_svn_branch_and_rev(r+1, b)]) + if fromtag: + if fromtag in self.tags: + tagged = node.hex(self.tags[fromtag]) + else: + tagged = node.hex(self.revmap[ + self.get_parent_svn_branch_and_rev(r+1, b)]) elif op == 'rm': tagged = node.hex(node.nullid) src += '%s %s\n' % (tagged, tag) + self.tags[tag] = node.bin(tagged), rev.revnum # add new changeset containing updated .hgtags def fctxfun(repo, memctx, path): return context.memfilectx(path='.hgtags', data=src, islink=False, isexec=False, copied=None) - extra = util.build_extra(rev.revnum, b, self.uuid, self.subdir) + extra = self.genextra(rev.revnum, b) + if fromtag: + extra['branch'] = parent.extra().get('branch', 'default') + if not self.usebranchnames: + extra.pop('branch', None) + if b in endbranches or fromtag: + extra['close'] = 1 ctx = context.memctx(self.repo, (parent.node(), node.nullid), rev.message or ' ', @@ -591,505 +515,28 @@ class HgChangeReceiver(delta.Editor): date, extra) new = self.repo.commitctx(ctx) - if (rev.revnum, b) not in self.revmap: - self.add_to_revmap(rev.revnum, b, new) - if b in endbranches: - endbranches[b] = new - - def commit_current_delta(self, tbdelta): - if hasattr(self, '_exception_info'): #pragma: no cover - traceback.print_exception(*self._exception_info) - raise ReplayException() - if self.missing_plaintexts: - raise MissingPlainTextError() - self._updateexternals() - # paranoidly generate the list of files to commit - files_to_commit = set(self.current_files.keys()) - files_to_commit.update(self.current_files_symlink.keys()) - files_to_commit.update(self.current_files_exec.keys()) - files_to_commit.update(self.deleted_files.keys()) - # back to a list and sort so we get sane behavior - files_to_commit = list(files_to_commit) - files_to_commit.sort() - branch_batches = {} - rev = self.current_rev - date = self.fixdate(rev.date) - # build up the branches that have files on them - for f in files_to_commit: - if not self._is_path_valid(f): - continue - p, b = self._path_and_branch_for_path(f) - if b not in branch_batches: - branch_batches[b] = [] - branch_batches[b].append((p, f)) - - closebranches = {} - for branch in tbdelta['branches'][1]: - branchedits = self.branchedits(branch, rev) - if len(branchedits) < 1: - # can't close a 
branch that never existed - continue - ha = branchedits[0][1] - closebranches[branch] = ha - - # 1. handle normal commits - closedrevs = closebranches.values() - for branch, files in branch_batches.iteritems(): - if branch in self.commit_branches_empty and files: - del self.commit_branches_empty[branch] - files = dict(files) - - parents = (self.get_parent_revision(rev.revnum, branch), - revlog.nullid) - if parents[0] in closedrevs and branch in self.branches_to_delete: - continue - extra = util.build_extra(rev.revnum, branch, self.uuid, self.subdir) - if branch is not None: - if (branch not in self.branches - and branch not in self.repo.branchtags()): - continue - parent_ctx = self.repo.changectx(parents[0]) - if '.hgsvnexternals' not in parent_ctx and '.hgsvnexternals' in files: - # Do not register empty externals files - if (files['.hgsvnexternals'] in self.current_files - and not self.current_files[files['.hgsvnexternals']]): - del files['.hgsvnexternals'] - - def filectxfn(repo, memctx, path): - current_file = files[path] - if current_file in self.deleted_files: - raise IOError() - copied = self.copies.get(current_file) - flags = parent_ctx.flags(path) - is_exec = self.current_files_exec.get(current_file, 'x' in flags) - is_link = self.current_files_symlink.get(current_file, 'l' in flags) - if current_file in self.current_files: - data = self.current_files[current_file] - if is_link and data.startswith('link '): - data = data[len('link '):] - elif is_link: - self.ui.warn('file marked as link, but contains data: ' - '%s (%r)\n' % (current_file, flags)) - else: - data = parent_ctx.filectx(path).data() - return context.memfilectx(path=path, - data=data, - islink=is_link, isexec=is_exec, - copied=copied) - if not self.usebranchnames: - extra.pop('branch', None) - current_ctx = context.memctx(self.repo, - parents, - rev.message or '...', - files.keys(), - filectxfn, - self.authors[rev.author], - date, - extra) - new_hash = self.repo.commitctx(current_ctx) - util.describe_commit(self.ui, new_hash, branch) - if (rev.revnum, branch) not in self.revmap: - self.add_to_revmap(rev.revnum, branch, new_hash) - - # 2. handle branches that need to be committed without any files - for branch in self.commit_branches_empty: - ha = self.get_parent_revision(rev.revnum, branch) - if ha == node.nullid: - continue - parent_ctx = self.repo.changectx(ha) - def del_all_files(*args): - raise IOError - # True here meant nuke all files, shouldn't happen with branch closing - if self.commit_branches_empty[branch]: #pragma: no cover - raise hgutil.Abort('Empty commit to an open branch attempted. ' - 'Please report this issue.') - extra = util.build_extra(rev.revnum, branch, self.uuid, self.subdir) - if not self.usebranchnames: - extra.pop('branch', None) - current_ctx = context.memctx(self.repo, - (ha, node.nullid), - rev.message or ' ', - [], - del_all_files, - self.authors[rev.author], - date, - extra) - new_hash = self.repo.commitctx(current_ctx) - util.describe_commit(self.ui, new_hash, branch) - if (rev.revnum, branch) not in self.revmap: - self.add_to_revmap(rev.revnum, branch, new_hash) - - # 3. handle tags - if tbdelta['tags'][0] or tbdelta['tags'][1]: - self.committags(tbdelta['tags'], rev, closebranches) - - # 4. 
close any branches that need it - for branch in tbdelta['branches'][1]: - # self.get_parent_revision(rev.revnum, branch) - ha = closebranches.get(branch) - if ha is None: - continue - self.delbranch(branch, ha, rev) - - self._save_metadata() - self.clear_current_info() + if not fromtag and (rev.revnum, b) not in self.revmap: + self.revmap[rev.revnum, b] = new + if b in endbranches: + endbranches.pop(b) + bname = b or 'default' + self.ui.status('Marked branch %s as closed.\n' % bname) def delbranch(self, branch, node, rev): pctx = self.repo[node] - def filectxfun(repo, memctx, path): - return pctx[path] files = pctx.manifest().keys() - extra = {'close': 1} + extra = self.genextra(rev.revnum, branch) + extra['close'] = 1 if self.usebranchnames: extra['branch'] = branch or 'default' ctx = context.memctx(self.repo, (node, revlog.nullid), rev.message or util.default_commit_msg, - files, - filectxfun, + [], + lambda x, y, z: None, self.authors[rev.author], self.fixdate(rev.date), extra) new = self.repo.commitctx(ctx) self.ui.status('Marked branch %s as closed.\n' % (branch or 'default')) - - def readfilemap(self, filemapfile): - self.ui.note( - ('Reading file map from %s\n') - % filemapfile) - def addpathtomap(path, mapping, mapname): - if path in mapping: - self.ui.warn(('Duplicate %s entry in %s: "%d"\n') % - (mapname, filemapfile, path)) - else: - self.ui.debug(('%sing %s\n') % - (mapname.capitalize().strip('e'), path)) - mapping[path] = path - - f = open(filemapfile, 'r') - for line in f: - if line.strip() == '' or line.strip()[0] == '#': - continue - try: - cmd, path = line.split(' ', 1) - cmd = cmd.strip() - path = path.strip() - if cmd == 'include': - addpathtomap(path, self.includepaths, 'include') - elif cmd == 'exclude': - addpathtomap(path, self.excludepaths, 'exclude') - else: - self.ui.warn( - ('Unknown filemap command %s\n') - % cmd) - except IndexError: - self.ui.warn( - ('Ignoring bad line in filemap %s: %s\n') - % (filemapfile, line.rstrip())) - f.close() - - def meta_data_dir(self): - return os.path.join(self.path, '.hg', 'svn') - meta_data_dir = property(meta_data_dir) - - def meta_file_named(self, name): - return os.path.join(self.meta_data_dir, name) - - def revmap_file(self): - return self.meta_file_named('rev_map') - revmap_file = property(revmap_file) - - def _get_uuid(self): - return open(self.meta_file_named('uuid')).read() - - def _set_uuid(self, uuid): - if not uuid: - return self._get_uuid() - elif os.path.isfile(self.meta_file_named('uuid')): - stored_uuid = self._get_uuid() - assert stored_uuid - if uuid != stored_uuid: - raise hgutil.Abort('unable to operate on unrelated repository') - else: - return stored_uuid - else: - if uuid: - f = open(self.meta_file_named('uuid'), 'w') - f.write(uuid) - f.flush() - f.close() - return self._get_uuid() - else: - raise hgutil.Abort('unable to operate on unrelated repository') - - uuid = property(_get_uuid, _set_uuid, None, - 'Error-checked UUID of source Subversion repository.') - - def branch_info_file(self): - return self.meta_file_named('branch_info') - branch_info_file = property(branch_info_file) - - def tag_locations_file(self): - return self.meta_file_named('tag_locations') - tag_locations_file = property(tag_locations_file) - - def authors_file(self): - return self.meta_file_named('authors') - authors_file = property(authors_file) - - def aresamefiles(self, parentctx, childctx, files): - """Assuming all files exist in childctx and parentctx, return True - if none of them was changed in-between. 
- """ - if parentctx == childctx: - return True - if parentctx.rev() > childctx.rev(): - parentctx, childctx = childctx, parentctx - - def selfandancestors(selfctx): - yield selfctx - for ctx in selfctx.ancestors(): - yield ctx - - files = dict.fromkeys(files) - for pctx in selfandancestors(childctx): - if pctx.rev() <= parentctx.rev(): - return True - for f in pctx.files(): - if f in files: - return False - # parentctx is not an ancestor of childctx, files are unrelated - return False - - # Here come all the actual editor methods - - @ieditor - def delete_entry(self, path, revision_bogus, parent_baton, pool=None): - br_path, branch = self._path_and_branch_for_path(path) - if br_path == '': - self.branches_to_delete.add(branch) - if br_path is not None: - ha = self.get_parent_revision(self.current_rev.revnum, branch) - if ha == revlog.nullid: - return - ctx = self.repo.changectx(ha) - if br_path not in ctx: - br_path2 = '' - if br_path != '': - br_path2 = br_path + '/' - # assuming it is a directory - self.externals[path] = None - map(self.delete_file, [pat for pat in self.current_files.iterkeys() - if pat.startswith(path+'/')]) - for f in ctx.walk(util.PrefixMatch(br_path2)): - f_p = '%s/%s' % (path, f[len(br_path2):]) - if f_p not in self.current_files: - self.delete_file(f_p) - self.delete_file(path) - - @ieditor - def open_file(self, path, parent_baton, base_revision, p=None): - self.current_file = None - fpath, branch = self._path_and_branch_for_path(path) - if not fpath: - self.ui.debug('WARNING: Opening non-existant file %s\n' % path) - return - - self.current_file = path - self.ui.note('M %s\n' % path) - if base_revision != -1: - self.base_revision = base_revision - else: - self.base_revision = None - - if self.current_file in self.current_files: - return - - baserev = base_revision - if baserev is None or baserev == -1: - baserev = self.current_rev.revnum - 1 - parent = self.get_parent_revision(baserev + 1, branch) - - ctx = self.repo[parent] - if not self._is_path_valid(path): - return - - if fpath not in ctx: - self.missing_plaintexts.add(path) - - fctx = ctx.filectx(fpath) - base = fctx.data() - if 'l' in fctx.flags(): - base = 'link ' + base - self.set_file(path, base, 'x' in fctx.flags(), 'l' in fctx.flags()) - - @ieditor - def add_file(self, path, parent_baton=None, copyfrom_path=None, - copyfrom_revision=None, file_pool=None): - self.current_file = None - self.base_revision = None - if path in self.deleted_files: - del self.deleted_files[path] - fpath, branch = self._path_and_branch_for_path(path, existing=False) - if not fpath: - return - if branch not in self.branches: - # we know this branch will exist now, because it has at least one file. Rock. 
- self.branches[branch] = None, 0, self.current_rev.revnum - self.current_file = path - if not copyfrom_path: - self.ui.note('A %s\n' % path) - self.set_file(path, '', False, False) - return - self.ui.note('A+ %s\n' % path) - (from_file, - from_branch) = self._path_and_branch_for_path(copyfrom_path) - if not from_file: - self.missing_plaintexts.add(path) - return - ha = self.get_parent_revision(copyfrom_revision + 1, - from_branch) - ctx = self.repo.changectx(ha) - if from_file in ctx: - fctx = ctx.filectx(from_file) - flags = fctx.flags() - self.set_file(path, fctx.data(), 'x' in flags, 'l' in flags) - if from_branch == branch: - parentid = self.get_parent_revision(self.current_rev.revnum, - branch) - if parentid != revlog.nullid: - parentctx = self.repo.changectx(parentid) - if self.aresamefiles(parentctx, ctx, [from_file]): - self.copies[path] = from_file - - @ieditor - def add_directory(self, path, parent_baton, copyfrom_path, - copyfrom_revision, dir_pool=None): - self.dir_batons[path] = path - br_path, branch = self._path_and_branch_for_path(path) - if br_path is not None: - if not copyfrom_path and not br_path: - self.commit_branches_empty[branch] = True - else: - self.commit_branches_empty[branch] = False - if br_path is None or not copyfrom_path: - return path - if copyfrom_path: - tag = self._is_path_tag(copyfrom_path) - if tag not in self.tags: - tag = None - if not self._is_path_valid(copyfrom_path) and not tag: - self.missing_plaintexts.add('%s/' % path) - return path - if tag: - source_branch, source_rev = self.tags[tag] - cp_f = '' - else: - source_rev = copyfrom_revision - cp_f, source_branch = self._path_and_branch_for_path(copyfrom_path) - if cp_f == '' and br_path == '': - assert br_path is not None - self.branches[branch] = source_branch, source_rev, self.current_rev.revnum - new_hash = self.get_parent_revision(source_rev + 1, - source_branch) - if new_hash == node.nullid: - self.missing_plaintexts.add('%s/' % path) - return path - cp_f_ctx = self.repo.changectx(new_hash) - if cp_f != '/' and cp_f != '': - cp_f = '%s/' % cp_f - else: - cp_f = '' - copies = {} - for f in cp_f_ctx: - if not f.startswith(cp_f): - continue - f2 = f[len(cp_f):] - fctx = cp_f_ctx.filectx(f) - fp_c = path + '/' + f2 - self.set_file(fp_c, fctx.data(), 'x' in fctx.flags(), 'l' in fctx.flags()) - if fp_c in self.deleted_files: - del self.deleted_files[fp_c] - if branch == source_branch: - copies[fp_c] = f - if copies: - # Preserve the directory copy records if no file was changed between - # the source and destination revisions, or discard it completely. 
- parentid = self.get_parent_revision(self.current_rev.revnum, branch) - if parentid != revlog.nullid: - parentctx = self.repo.changectx(parentid) - if self.aresamefiles(parentctx, cp_f_ctx, copies.values()): - self.copies.update(copies) - return path - - @ieditor - def change_file_prop(self, file_baton, name, value, pool=None): - if name == 'svn:executable': - self.current_files_exec[self.current_file] = bool(value is not None) - elif name == 'svn:special': - self.current_files_symlink[self.current_file] = bool(value is not None) - - @ieditor - def change_dir_prop(self, dir_baton, name, value, pool=None): - if dir_baton is None: - return - path = self.dir_batons[dir_baton] - if name == 'svn:externals': - self.externals[path] = value - - @ieditor - def open_directory(self, path, parent_baton, base_revision, dir_pool=None): - self.dir_batons[path] = path - p_, branch = self._path_and_branch_for_path(path) - if p_ == '': - self.commit_branches_empty[branch] = False - return path - - @ieditor - def close_directory(self, dir_baton, dir_pool=None): - if dir_baton is not None: - del self.dir_batons[dir_baton] - - @ieditor - def apply_textdelta(self, file_baton, base_checksum, pool=None): - # We know coming in here the file must be one of the following options: - # 1) Deleted (invalid, fail an assertion) - # 2) Missing a base text (bail quick since we have to fetch a full plaintext) - # 3) Has a base text in self.current_files, apply deltas - base = '' - if not self._is_path_valid(self.current_file): - return lambda x: None - assert self.current_file not in self.deleted_files, ( - 'Cannot apply_textdelta to a deleted file: %s' % self.current_file) - assert (self.current_file in self.current_files - or self.current_file in self.missing_plaintexts), '%s not found' % self.current_file - if self.current_file in self.missing_plaintexts: - return lambda x: None - base = self.current_files[self.current_file] - source = cStringIO.StringIO(base) - target = cStringIO.StringIO() - self.stream = target - - handler, baton = delta.svn_txdelta_apply(source, target, None) - if not callable(handler): #pragma: no cover - raise hgutil.Abort('Error in Subversion bindings: ' - 'cannot call handler!') - def txdelt_window(window): - try: - if not self._is_path_valid(self.current_file): - return - handler(window, baton) - # window being None means commit this file - if not window: - self.current_files[self.current_file] = target.getvalue() - except core.SubversionException, e: #pragma: no cover - if e.apr_err == core.SVN_ERR_INCOMPLETE_DATA: - self.missing_plaintexts.add(self.current_file) - else: #pragma: no cover - raise hgutil.Abort(*e.args) - except: #pragma: no cover - print len(base), self.current_file - self._exception_info = sys.exc_info() - raise - return txdelt_window diff --git a/hgsubversion/svnrepo.py b/hgsubversion/svnrepo.py --- a/hgsubversion/svnrepo.py +++ b/hgsubversion/svnrepo.py @@ -15,30 +15,22 @@ subclass: pull() is called on the instan """ from mercurial import error -from mercurial import node from mercurial import util as hgutil from mercurial import httprepo import mercurial.repo -import hg_delta_editor import util import wrappers +import svnwrap +import svnmeta + +propertycache = hgutil.propertycache def generate_repo_class(ui, repo): """ This function generates the local repository wrapper. """ superclass = repo.__class__ - def localsvn(fn): - """ - Filter for instance methods which only apply to local Subversion - repositories. 
- """ - if util.is_svn_repo(repo): - return fn - else: - return getattr(repo, fn.__name__) - def remotesvn(fn): """ Filter for instance methods which require the first argument @@ -68,25 +60,44 @@ def generate_repo_class(ui, repo): def findoutgoing(self, remote, base=None, heads=None, force=False): return wrappers.outgoing(repo, remote, heads, force) - @remotesvn - def findcommonincoming(self, remote, base=None, heads=None, - force=False): - raise hgutil.Abort('cannot display incoming changes from ' - 'Subversion repositories, yet') + def svnmeta(self, uuid=None, subdir=''): + return svnmeta.SVNMeta(self, uuid, subdir) repo.__class__ = svnlocalrepo class svnremoterepo(mercurial.repo.repository): """ the dumb wrapper for actual Subversion repositories """ - def __init__(self, ui, path): + def __init__(self, ui, path=None): self.ui = ui + if path is None: + path = self.ui.config('paths', 'default') self.path = path self.capabilities = set(['lookup', 'subversion']) + @propertycache + def svnauth(self): + # DO NOT default the user to hg's getuser(). If you provide + # *any* default username to Subversion, it won't use any remembered + # username for the desired realm, breaking OS X Keychain support, + # GNOME keyring support, and all similar tools. + user = self.ui.config('hgsubversion', 'username') + passwd = self.ui.config('hgsubversion', 'password') + url = util.normalize_url(self.path) + user, passwd, url = svnwrap.parse_url(url, user, passwd) + return url, user, passwd + @property def svnurl(self): - return util.normalize_url(self.path) + return self.svnauth[0] + + @propertycache + def svn(self): + return svnwrap.SubversionRepo(*self.svnauth) + + @property + def svnuuid(self): + return self.svn.uuid def url(self): return self.path diff --git a/hgsubversion/svnwrap/svn_swig_wrapper.py b/hgsubversion/svnwrap/svn_swig_wrapper.py --- a/hgsubversion/svnwrap/svn_swig_wrapper.py +++ b/hgsubversion/svnwrap/svn_swig_wrapper.py @@ -115,20 +115,24 @@ def _create_auth_baton(pool): return core.svn_auth_open(providers, pool) -def parse_url(url): + +def parse_url(url, user=None, passwd=None): """Parse a URL and return a tuple (username, password, url) """ scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) - user, passwd = None, None if '@' in netloc: userpass, netloc = netloc.split('@') - if ':' in userpass: - user, passwd = userpass.split(':') - user, passwd = urllib.unquote(user) or None, urllib.unquote(passwd) or None - else: - user = urllib.unquote(userpass) or None + if not user and not passwd: + if ':' in userpass: + user, passwd = userpass.split(':') + else: + user, passwd = userpass, '' + user, passwd = urllib.unquote(user), urllib.unquote(passwd) + if user and scheme == 'svn+ssh': + netloc = '@'.join((user, netloc, )) url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) - return (user, passwd, url) + return (user or None, passwd or None, url) + class Revision(tuple): """Wrapper for a Subversion revision. @@ -178,10 +182,10 @@ class SubversionRepo(object): It takes a required param, the URL. 
""" def __init__(self, url='', username='', password='', head=None): - parsed = parse_url(url) + parsed = parse_url(url, username, password) # --username and --password override URL credentials - self.username = username or parsed[0] - self.password = password or parsed[1] + self.username = parsed[0] + self.password = parsed[1] self.svn_url = parsed[2] self.auth_baton_pool = core.Pool() self.auth_baton = _create_auth_baton(self.auth_baton_pool) diff --git a/hgsubversion/util.py b/hgsubversion/util.py --- a/hgsubversion/util.py +++ b/hgsubversion/util.py @@ -1,17 +1,63 @@ +import re import os -import shutil +import urllib from mercurial import hg from mercurial import node from mercurial import util as hgutil -def getuserpass(opts): - # DO NOT default the user to hg's getuser(). If you provide - # *any* default username to Subversion, it won't use any remembered - # username for the desired realm, breaking OS X Keychain support, - # GNOME keyring support, and all similar tools. - return opts.get('username', None), opts.get('password', '') +b_re = re.compile(r'^\+\+\+ b\/([^\n]*)', re.MULTILINE) +a_re = re.compile(r'^--- a\/([^\n]*)', re.MULTILINE) +devnull_re = re.compile(r'^([-+]{3}) /dev/null', re.MULTILINE) +header_re = re.compile(r'^diff --git .* b\/(.*)', re.MULTILINE) +newfile_devnull_re = re.compile(r'^--- /dev/null\n\+\+\+ b/([^\n]*)', + re.MULTILINE) + + +def formatrev(rev): + if rev == -1: + return '\t(working copy)' + return '\t(revision %d)' % rev + + +def filterdiff(diff, oldrev, newrev): + diff = newfile_devnull_re.sub(r'--- \1\t(revision 0)' '\n' + r'+++ \1\t(working copy)', + diff) + oldrev = formatrev(oldrev) + newrev = formatrev(newrev) + diff = a_re.sub(r'--- \1'+ oldrev, diff) + diff = b_re.sub(r'+++ \1' + newrev, diff) + diff = devnull_re.sub(r'\1 /dev/null\t(working copy)', diff) + diff = header_re.sub(r'Index: \1' + '\n' + ('=' * 67), diff) + return diff + + +def parentrev(ui, repo, meta, hashes): + """Find the svn parent revision of the repo's dirstate. + """ + workingctx = repo.parents()[0] + outrev = outgoing_revisions(repo, hashes, workingctx.node()) + if outrev: + workingctx = repo[outrev[-1]].parents()[0] + return workingctx + + +def islocalrepo(url): + if not url.startswith('file:///'): + return False + if '#' in url.split('/')[-1]: # strip off #anchor + url = url[:url.rfind('#')] + path = url[len('file://'):] + path = urllib.url2pathname(path).replace(os.sep, '/') + while '/' in path: + if reduce(lambda x,y: x and y, + map(lambda p: os.path.exists(os.path.join(path, p)), + ('hooks', 'format', 'db', ))): + return True + path = path.rsplit('/', 1)[0] + return False def version(ui): @@ -24,35 +70,16 @@ def version(ui): return node.hex(ver)[:12] -def normalize_url(svnurl): - url, revs, checkout = hg.parseurl(svnurl) +def normalize_url(url): + if url.startswith('svn+http://') or url.startswith('svn+https://'): + url = url[4:] + url, revs, checkout = hg.parseurl(url) url = url.rstrip('/') if checkout: url = '%s#%s' % (url, checkout) return url -REVMAP_FILE_VERSION = 1 -def parse_revmap(revmap_filename): - revmap = {} - f = open(revmap_filename) - ver = int(f.readline()) - if ver == 1: - for l in f: - revnum, node_hash, branch = l.split(' ', 2) - if branch == '\n': - branch = None - else: - branch = branch[:-1] - revmap[int(revnum), branch] = node.bin(node_hash) - f.close() - else: #pragma: no cover - print ('Your revmap was made by a newer version of hgsubversion.' 
- ' Please upgrade.') - raise NotImplementedError - return revmap - - class PrefixMatch(object): def __init__(self, prefix): self.p = prefix @@ -63,7 +90,7 @@ class PrefixMatch(object): def __call__(self, fn): return fn.startswith(self.p) -def outgoing_revisions(ui, repo, hg_editor, reverse_map, sourcerev): +def outgoing_revisions(repo, reverse_map, sourcerev): """Given a repo and an hg_editor, determines outgoing revisions for the current working copy state. """ @@ -81,45 +108,39 @@ def outgoing_revisions(ui, repo, hg_edit if sourcerev.node() != node.nullid: return outgoing_rev_hashes -def build_extra(revnum, branch, uuid, subdir): - extra = {} - branchpath = 'trunk' - if branch: - extra['branch'] = branch - branchpath = 'branches/%s' % branch - if subdir and subdir[-1] == '/': - subdir = subdir[:-1] - if subdir and subdir[0] != '/': - subdir = '/' + subdir - extra['convert_revision'] = 'svn:%(uuid)s%(path)s@%(rev)s' % { - 'uuid': uuid, - 'path': '%s/%s' % (subdir , branchpath), - 'rev': revnum, - } - return extra - - -def is_svn_repo(repo): - return os.path.exists(os.path.join(repo.path, 'svn', 'uuid')) - default_commit_msg = '*** empty log message ***' -def describe_revision(ui, r): - try: - msg = [s for s in map(str.strip, r.message.splitlines()) if s][0] - except: - msg = default_commit_msg - - ui.status(('[r%d] %s: %s' % (r.revnum, r.author, msg))[:80] + '\n') - def describe_commit(ui, h, b): ui.note(' committed to "%s" as %s\n' % ((b or 'default'), node.short(h))) def swap_out_encoding(new_encoding="UTF-8"): - """ Utility for mercurial incompatibility changes, can be removed after 1.3 - """ from mercurial import encoding old = encoding.encoding encoding.encoding = new_encoding return old + + +def aresamefiles(parentctx, childctx, files): + """Assuming all files exist in childctx and parentctx, return True + if none of them was changed in-between. + """ + if parentctx == childctx: + return True + if parentctx.rev() > childctx.rev(): + parentctx, childctx = childctx, parentctx + + def selfandancestors(selfctx): + yield selfctx + for ctx in selfctx.ancestors(): + yield ctx + + files = dict.fromkeys(files) + for pctx in selfandancestors(childctx): + if pctx.rev() <= parentctx.rev(): + return True + for f in pctx.files(): + if f in files: + return False + # parentctx is not an ancestor of childctx, files are unrelated + return False diff --git a/hgsubversion/utility_commands.py b/hgsubversion/utility_commands.py --- a/hgsubversion/utility_commands.py +++ b/hgsubversion/utility_commands.py @@ -3,61 +3,45 @@ import os from mercurial import util as hgutil import svnwrap -import cmdutil +import svnrepo import util -import hg_delta_editor -def genignore(ui, repo, hg_repo_path, force=False, **opts): +def genignore(ui, repo, force=False, **opts): """generate .hgignore from svn:ignore properties. 
""" - ignpath = os.path.join(hg_repo_path, '.hgignore') + ignpath = repo.wjoin('.hgignore') if not force and os.path.exists(ignpath): raise hgutil.Abort('not overwriting existing .hgignore, try --force?') - ignorefile = open(ignpath, 'w') - ignorefile.write('.hgignore\nsyntax:glob\n') - url = util.normalize_url(repo.ui.config('paths', 'default')) - user, passwd = util.getuserpass(opts) - svn = svnwrap.SubversionRepo(url, user, passwd) - hge = hg_delta_editor.HgChangeReceiver(path=hg_repo_path, repo=repo, - ui_=ui, uuid=svn.uuid) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - parent = cmdutil.parentrev(ui, repo, hge, svn_commit_hashes) - r, br = svn_commit_hashes[parent.node()] - if br == None: - branchpath = 'trunk' - else: - branchpath = 'branches/%s' % br - if url[-1] == '/': - url = url[:-1] - dirs = [''] + [d[0] for d in svn.list_files(branchpath, r) if d[1] == 'd'] + svn = svnrepo.svnremoterepo(repo.ui).svn + meta = repo.svnmeta() + hashes = meta.revmap.hashes() + parent = util.parentrev(ui, repo, meta, hashes) + r, br = hashes[parent.node()] + branchpath = br and ('branches/%s' % br) or 'trunk' + ignorelines = ['.hgignore', 'syntax:glob'] + dirs = [''] + [d[0] for d in svn.list_files(branchpath, r) + if d[1] == 'd'] for dir in dirs: - props = svn.list_props('%s/%s/' % (branchpath,dir), r) - if 'svn:ignore' in props: - lines = props['svn:ignore'].strip().split('\n') - for prop in lines: - if dir: - ignorefile.write('%s/%s\n' % (dir, prop)) - else: - ignorefile.write('%s\n' % prop) + props = svn.list_props('%s/%s/' % (branchpath, dir), r) + if 'svn:ignore' not in props: + continue + lines = props['svn:ignore'].strip().split('\n') + ignorelines += [dir and (dir + '/' + prop) or prop for prop in lines] + + repo.wopener('.hgignore', 'w').write('\n'.join(ignorelines) + '\n') def info(ui, repo, hg_repo_path, **opts): """show Subversion details similar to `svn info' """ - url = util.normalize_url(repo.ui.config('paths', 'default')) - user, passwd = util.getuserpass(opts) - svn = svnwrap.SubversionRepo(url, user, passwd) - hge = hg_delta_editor.HgChangeReceiver(path=hg_repo_path, repo=repo, - ui_=ui, uuid=svn.uuid) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - parent = cmdutil.parentrev(ui, repo, hge, svn_commit_hashes) + meta = repo.svnmeta() + hashes = meta.revmap.hashes() + parent = util.parentrev(ui, repo, meta, hashes) pn = parent.node() - if pn not in svn_commit_hashes: + if pn not in hashes: ui.status('Not a child of an svn revision.\n') return 0 - r, br = svn_commit_hashes[pn] + r, br = hashes[pn] subdir = parent.extra()['convert_revision'][40:].split('@')[0] if br == None: branchpath = '/trunk' @@ -66,11 +50,9 @@ def info(ui, repo, hg_repo_path, **opts) subdir = subdir.replace('branches/../', '') else: branchpath = '/branches/%s' % br - url = util.normalize_url(repo.ui.config('paths', 'default')) - if url[-1] == '/': - url = url[:-1] - url = '%s%s' % (url, branchpath) - author = hge.authors.reverselookup(parent.user()) + remoterepo = svnrepo.svnremoterepo(repo.ui) + url = '%s%s' % (remoterepo.svnurl, branchpath) + author = meta.authors.reverselookup(parent.user()) # cleverly figure out repo root w/o actually contacting the server reporoot = url[:len(url)-len(subdir)] ui.status('''URL: %(url)s @@ -82,7 +64,7 @@ Last Changed Author: %(author)s Last Changed Rev: %(revision)s Last Changed Date: %(date)s\n''' % {'reporoot': reporoot, - 'uuid': hge.uuid, + 'uuid': meta.uuid, 'url': url, 'author': author, 'revision': 
r, @@ -98,7 +80,7 @@ def listauthors(ui, args, authors=None, if not len(args): ui.status('No repository specified.\n') return - svn = svnwrap.SubversionRepo(util.normalize_url(args[0])) + svn = svnrepo.svnremoterepo(ui, args[0]).svn author_set = set() for rev in svn.revisions(): author_set.add(str(rev.author)) # So None becomes 'None' diff --git a/hgsubversion/wrappers.py b/hgsubversion/wrappers.py --- a/hgsubversion/wrappers.py +++ b/hgsubversion/wrappers.py @@ -1,9 +1,6 @@ -import os - from hgext import rebase as hgrebase -from mercurial import cmdutil as hgcmdutil -from mercurial import commands +from mercurial import cmdutil from mercurial import patch from mercurial import hg from mercurial import util as hgutil @@ -13,33 +10,62 @@ from mercurial import i18n from svn import core from svn import delta -import cmdutil -import hg_delta_editor +import replay +import pushmod import stupid as stupidmod import svnwrap +import svnrepo import util pullfuns = { - True: cmdutil.replay_convert_rev, + True: replay.convert_rev, False: stupidmod.convert_rev, } -def parent(orig, ui, repo, *args, **opts): +revmeta = [ + ('revision', 'revnum'), + ('user', 'author'), + ('date', 'date'), + ('message', 'message'), +] + +def parents(orig, ui, repo, *args, **opts): """show Mercurial & Subversion parents of the working dir or revision """ if not opts.get('svn', False): return orig(ui, repo, *args, **opts) - hge = hg_delta_editor.HgChangeReceiver(repo=repo) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - ha = cmdutil.parentrev(ui, repo, hge, svn_commit_hashes) + meta = repo.svnmeta() + hashes = meta.revmap.hashes() + ha = util.parentrev(ui, repo, meta, hashes) if ha.node() == node.nullid: raise hgutil.Abort('No parent svn revision!') - displayer = hgcmdutil.show_changeset(ui, repo, opts, buffered=False) + displayer = cmdutil.show_changeset(ui, repo, opts, buffered=False) displayer.show(ha) return 0 +def incoming(orig, ui, repo, source='default', **opts): + """show incoming revisions from Subversion + """ + + source, revs, checkout = hg.parseurl(ui.expandpath(source)) + other = hg.repository(ui, source) + if 'subversion' not in other.capabilities: + return orig(ui, repo, source, **opts) + + meta = repo.svnmeta() + + ui.status('incoming changes from %s\n' % other.svnurl) + for r in other.svn.revisions(start=meta.revmap.seen): + ui.status('\n') + for label, attr in revmeta: + l1 = label + ':' + val = str(getattr(r, attr)).strip() + if not ui.verbose: + val = val.split('\n')[0] + ui.status('%s%s\n' % (l1.ljust(13), val)) + + def outgoing(repo, dest=None, heads=None, force=False): """show changesets not found in the Subversion repository """ @@ -47,11 +73,10 @@ def outgoing(repo, dest=None, heads=None # split off #rev; TODO implement --revision/#rev support svnurl, revs, checkout = hg.parseurl(dest.svnurl, heads) - hge = hg_delta_editor.HgChangeReceiver(repo=repo) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - return util.outgoing_revisions(repo.ui, repo, hge, svn_commit_hashes, - repo.parents()[0].node()) + meta = repo.svnmeta() + parent = repo.parents()[0].node() + hashes = meta.revmap.hashes() + return util.outgoing_revisions(repo, hashes, parent) def diff(orig, ui, repo, *args, **opts): @@ -59,20 +84,17 @@ def diff(orig, ui, repo, *args, **opts): """ if not opts.get('svn', False) or opts.get('change', None): return orig(ui, repo, *args, **opts) - svn_commit_hashes = {} - hge = hg_delta_editor.HgChangeReceiver(repo=repo) - 
svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) + meta = repo.svnmeta() + hashes = meta.revmap.hashes() if not opts.get('rev', None): parent = repo.parents()[0] - o_r = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, - parent.node()) + o_r = util.outgoing_revisions(repo, hashes, parent.node()) if o_r: parent = repo[o_r[-1]].parents()[0] opts['rev'] = ['%s:.' % node.hex(parent.node()), ] - node1, node2 = hgcmdutil.revpair(repo, opts['rev']) - baserev, _junk = svn_commit_hashes.get(node1, (-1, 'junk', )) - newrev, _junk = svn_commit_hashes.get(node2, (-1, 'junk', )) + node1, node2 = cmdutil.revpair(repo, opts['rev']) + baserev, _junk = hashes.get(node1, (-1, 'junk')) + newrev, _junk = hashes.get(node2, (-1, 'junk')) it = patch.diff(repo, node1, node2, opts=patch.diffopts(ui, opts={'git': True, 'show_function': False, @@ -82,22 +104,20 @@ def diff(orig, ui, repo, *args, **opts): 'unified': True, 'text': False, })) - ui.write(cmdutil.filterdiff(''.join(it), baserev, newrev)) + ui.write(util.filterdiff(''.join(it), baserev, newrev)) def push(repo, dest, force, revs): """push revisions starting at a specified head back to Subversion. """ assert not revs, 'designated revisions for push remains unimplemented.' + cmdutil.bail_if_changed(repo) ui = repo.ui - svnurl = util.normalize_url(repo.ui.expandpath(dest.svnurl)) old_encoding = util.swap_out_encoding() - # split of #rev; TODO: implement --rev/#rev support - svnurl, revs, checkout = hg.parseurl(svnurl, revs) + # TODO: implement --rev/#rev support # TODO: do credentials specified in the URL still work? - user = repo.ui.config('hgsubversion', 'username') - passwd = repo.ui.config('hgsubversion', 'password') - svn = svnwrap.SubversionRepo(svnurl, user, passwd) - hge = hg_delta_editor.HgChangeReceiver(repo=repo, uuid=svn.uuid) + svnurl = repo.ui.expandpath(dest.svnurl) + svn = svnrepo.svnremoterepo(repo.ui, svnurl).svn + meta = repo.svnmeta(svn.uuid) # Strategy: # 1. Find all outgoing commits from this head @@ -106,9 +126,8 @@ def push(repo, dest, force, revs): return 1 workingrev = repo.parents()[0] ui.status('searching for changes\n') - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - outgoing = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, workingrev.node()) + hashes = meta.revmap.hashes() + outgoing = util.outgoing_revisions(repo, hashes, workingrev.node()) if not (outgoing and len(outgoing)): ui.status('no changes found\n') return 0 @@ -124,17 +143,16 @@ def push(repo, dest, force, revs): svnbranch = repo[base_n].branch() oldtip = base_n samebranchchildren = [c for c in repo[oldtip].children() if c.branch() == svnbranch - and c.node() in svn_commit_hashes] + and c.node() in hashes] while samebranchchildren: oldtip = samebranchchildren[0].node() samebranchchildren = [c for c in repo[oldtip].children() if c.branch() == svnbranch - and c.node() in svn_commit_hashes] + and c.node() in hashes] # 2. Commit oldest revision that needs to be pushed - base_revision = svn_commit_hashes[base_n][0] + base_revision = hashes[base_n][0] try: - cmdutil.commit_from_rev(ui, repo, old_ctx, hge, svnurl, - base_revision, user, passwd) - except cmdutil.NoFilesException: + pushmod.commit(ui, repo, old_ctx, meta, base_revision, svn) + except pushmod.NoFilesException: ui.warn("Could not push revision %s because it had no changes in svn.\n" % old_ctx) return 1 @@ -158,7 +176,7 @@ def push(repo, dest, force, revs): # TODO: can we avoid calling our own rebase wrapper here? 
rebase(hgrebase.rebase, ui, repo, svn=True, svnextrafn=extrafn, svnsourcerev=needs_transplant) - repo = hg.repository(ui, hge.path) + repo = hg.repository(ui, meta.path) for child in repo[replacement.node()].children(): rebasesrc = node.bin(child.extra().get('rebase_source', node.hex(node.nullid))) if rebasesrc in outgoing: @@ -170,9 +188,9 @@ def push(repo, dest, force, revs): if children: child = children[0] rebasesrc = node.bin(child.extra().get('rebase_source', node.hex(node.nullid))) - # TODO: stop constantly creating the HgChangeReceiver instances. - hge = hg_delta_editor.HgChangeReceiver(hge.repo, ui_=ui, uuid=svn.uuid) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), hge.revmap.iterkeys())) + # TODO: stop constantly creating the SVNMeta instances. + meta = repo.svnmeta(svn.uuid) + hashes = meta.revmap.hashes() util.swap_out_encoding(old_encoding) return 0 @@ -207,14 +225,11 @@ def pull(repo, source, heads=[], force=F repo.ui.note('fetching stupidly...\n') # TODO: do credentials specified in the URL still work? - user = repo.ui.config('hgsubversion', 'username') - passwd = repo.ui.config('hgsubversion', 'password') - svn = svnwrap.SubversionRepo(svn_url, user, passwd) - hg_editor = hg_delta_editor.HgChangeReceiver(repo=repo, subdir=svn.subdir, - uuid=svn.uuid) - - start = max(hg_editor.last_known_revision(), skipto_rev) - initializing_repo = (hg_editor.last_known_revision() <= 0) + svn = svnrepo.svnremoterepo(repo.ui, svn_url).svn + meta = repo.svnmeta(svn.uuid, svn.subdir) + + start = max(meta.revmap.seen, skipto_rev) + initializing_repo = meta.revmap.seen <= 0 ui = repo.ui if initializing_repo and start > 0: @@ -223,35 +238,55 @@ def pull(repo, source, heads=[], force=F revisions = 0 try: - # start converting revisions - for r in svn.revisions(start=start, stop=stopat_rev): - if (r.author is None and - r.message == 'This is an empty revision for padding.'): - continue - tbdelta = hg_editor.update_branch_tag_map_for_rev(r) - # got a 502? Try more than once! - tries = 0 - converted = False - while not converted: - try: - util.describe_revision(ui, r) - pullfuns[have_replay](ui, hg_editor, svn, r, tbdelta) - converted = True - except svnwrap.SubversionRepoCanNotReplay, e: #pragma: no cover - ui.status('%s\n' % e.message) - stupidmod.print_your_svn_is_old_message(ui) - have_replay = False - except core.SubversionException, e: #pragma: no cover - if (e.apr_err == core.SVN_ERR_RA_DAV_REQUEST_FAILED - and '502' in str(e) - and tries < 3): - tries += 1 - ui.status('Got a 502, retrying (%s)\n' % tries) - else: - raise hgutil.Abort(*e.args) - revisions += 1 - except KeyboardInterrupt: - pass + try: + # start converting revisions + for r in svn.revisions(start=start, stop=stopat_rev): + if (r.author is None and + r.message == 'This is an empty revision for padding.'): + continue + tbdelta = meta.update_branch_tag_map_for_rev(r) + # got a 502? Try more than once! 
+ tries = 0 + converted = False + while not converted: + try: + + msg = r.message.strip() + if not msg: + msg = util.default_commit_msg + else: + msg = [s.strip() for s in msg.splitlines() if s][0] + w = hgutil.termwidth() + bits = (r.revnum, r.author, msg) + ui.status(('[r%d] %s: %s\n' % bits)[:w]) + + meta.save_tbdelta(tbdelta) + close = pullfuns[have_replay](ui, meta, svn, r, tbdelta) + if tbdelta['tags'][0] or tbdelta['tags'][1]: + meta.committags(tbdelta['tags'], r, close) + for branch, parent in close.iteritems(): + if parent in (None, node.nullid): + continue + meta.delbranch(branch, parent, r) + + meta.save() + converted = True + + except svnwrap.SubversionRepoCanNotReplay, e: #pragma: no cover + ui.status('%s\n' % e.message) + stupidmod.print_your_svn_is_old_message(ui) + have_replay = False + except core.SubversionException, e: #pragma: no cover + if (e.apr_err == core.SVN_ERR_RA_DAV_REQUEST_FAILED + and '502' in str(e) + and tries < 3): + tries += 1 + ui.status('Got a 502, retrying (%s)\n' % tries) + else: + raise hgutil.Abort(*e.args) + revisions += 1 + except KeyboardInterrupt: + pass finally: util.swap_out_encoding(old_encoding) @@ -279,10 +314,9 @@ def rebase(orig, ui, repo, **opts): extra['branch'] = ctx.branch() extrafn = opts.get('svnextrafn', extrafn2) sourcerev = opts.get('svnsourcerev', repo.parents()[0].node()) - hge = hg_delta_editor.HgChangeReceiver(repo=repo) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - o_r = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, sourcerev=sourcerev) + meta = repo.svnmeta() + hashes = meta.revmap.hashes() + o_r = util.outgoing_revisions(repo, hashes, sourcerev=sourcerev) if not o_r: ui.status('Nothing to rebase!\n') return 0 @@ -297,8 +331,7 @@ def rebase(orig, ui, repo, **opts): for c in target_rev.children(): exhausted_choices = True n = c.node() - if (n in svn_commit_hashes and - svn_commit_hashes[n][1] == svn_commit_hashes[p_n][1]): + if (n in hashes and hashes[n][1] == hashes[p_n][1]): target_rev = c exhausted_choices = False break @@ -308,3 +341,59 @@ def rebase(orig, ui, repo, **opts): return orig(ui, repo, dest=node.hex(target_rev.node()), base=node.hex(sourcerev), extrafn=extrafn) + + +optionmap = { + 'tagpaths': ('hgsubversion', 'tagpaths'), + 'authors': ('hgsubversion', 'authormap'), + 'filemap': ('hgsubversion', 'filemap'), + 'stupid': ('hgsubversion', 'stupid'), + 'defaulthost': ('hgsubversion', 'defaulthost'), + 'defaultauthors': ('hgsubversion', 'defaultauthors'), + 'usebranchnames': ('hgsubversion', 'usebranchnames'), +} + +dontretain = { 'hgsubversion': set(['authormap', 'filemap']) } + +def clone(orig, ui, source, dest=None, **opts): + """ + Some of the options listed below only apply to Subversion + %(target)s. See 'hg help %(extension)s' for more information on + them as well as other ways of customising the conversion process. 
+ """ + + for opt, (section, name) in optionmap.iteritems(): + if opt in opts and opts[opt]: + ui.setconfig(section, name, str(opts.pop(opt))) + + # this must be kept in sync with mercurial/commands.py + srcrepo, dstrepo = hg.clone(cmdutil.remoteui(ui, opts), source, dest, + pull=opts.get('pull'), + stream=opts.get('uncompressed'), + rev=opts.get('rev'), + update=not opts.get('noupdate')) + + if dstrepo.local() and srcrepo.capable('subversion'): + fd = dstrepo.opener("hgrc", "a", text=True) + for section in set(s for s, v in optionmap.itervalues()): + config = dict(ui.configitems(section)) + for name in dontretain[section]: + config.pop(name, None) + + if config: + fd.write('\n[%s]\n' % section) + map(fd.write, ('%s = %s\n' % p for p in config.iteritems())) + + +def generic(orig, ui, repo, *args, **opts): + """ + Subversion %(target)s can be used for %(command)s. See 'hg help + %(extension)s' for more on the conversion process. + """ + for opt, (section, name) in optionmap.iteritems(): + if opt in opts and opts[opt]: + if isinstance(repo, str): + ui.setconfig(section, name, opts.pop(opt)) + else: + repo.ui.setconfig(section, name, opts.pop(opt)) + return orig(ui, repo, *args, **opts) diff --git a/tests/comprehensive/test_stupid_pull.py b/tests/comprehensive/test_stupid_pull.py --- a/tests/comprehensive/test_stupid_pull.py +++ b/tests/comprehensive/test_stupid_pull.py @@ -21,7 +21,7 @@ def _do_case(self, name): u.setconfig('hgsubversion', 'stupid', '1') hg.clone(u, test_util.fileurl(checkout_path), wc2_path, update=False) self.repo2 = hg.repository(ui.ui(), wc2_path) - self.assertEqual(self.repo.branchtags(), self.repo2.branchtags()) + self.assertEqual(self.repo.heads(), self.repo2.heads()) def buildmethod(case, name): diff --git a/tests/comprehensive/test_verify.py b/tests/comprehensive/test_verify.py new file mode 100644 --- /dev/null +++ b/tests/comprehensive/test_verify.py @@ -0,0 +1,47 @@ +import os +import pickle +import unittest + +# wrapped in a try/except because of weirdness in how +# run.py works as compared to nose. +try: + import test_util +except ImportError: + from tests import test_util + +from mercurial import hg +from mercurial import ui + +from hgsubversion import svncommands + +def _do_case(self, name, stupid): + subdir = test_util.subdir.get(name, '') + repo = self._load_fixture_and_fetch(name, subdir=subdir, stupid=stupid) + assert len(self.repo) > 0 + for i in repo: + ctx = repo[i] + self.assertEqual(svncommands.verify(repo.ui, repo, rev=ctx.node()), 0) + +def buildmethod(case, name, stupid): + m = lambda self: self._do_case(case, stupid) + m.__name__ = name + bits = case, stupid and 'stupid' or 'real' + m.__doc__ = 'Test verify on %s with %s replay.' 
% bits + return m + +attrs = {'_do_case': _do_case} +fixtures = [f for f in os.listdir(test_util.FIXTURES) if f.endswith('.svndump')] +for case in fixtures: + # this fixture results in an empty repository, don't use it + if case == 'project_root_not_repo_root.svndump': + continue + name = 'test_' + case[:-len('.svndump')] + attrs[name] = buildmethod(case, name, False) + name += '_stupid' + attrs[name] = buildmethod(case, name, True) + +VerifyTests = type('VerifyTests', (test_util.TestBase,), attrs) + +def suite(): + all = [unittest.TestLoader().loadTestsFromTestCase(VerifyTests)] + return unittest.TestSuite(all) diff --git a/tests/fixtures/author-map-test.txt b/tests/fixtures/author-map-test.txt new file mode 100644 --- /dev/null +++ b/tests/fixtures/author-map-test.txt @@ -0,0 +1,397 @@ +alpha01 = Alpha +alpha02 = Alpha +alpha03 = Alpha +alpha04 = Alpha +alpha05 = Alpha +alpha06 = Alpha +alpha07 = Alpha +alpha08 = Alpha +alpha09 = Alpha +alpha10 = Alpha +alpha11 = Alpha +alpha12 = Alpha +alpha13 = Alpha +alpha14 = Alpha +alpha15 = Alpha +alpha16 = Alpha +alpha17 = Alpha +alpha18 = Alpha +alpha19 = Alpha +alpha20 = Alpha +alpha21 = Alpha +alpha22 = Alph5 +alpha23 = Alpha +alpha24 = Alpha +alpha25 = Alpha +alpha26 = Alpha +alpha27 = Alpha +alpha28 = Alpha +alpha29 = Alpha +alpha30 = Alpha +alpha31 = Alpha +alpha32 = Alpha +alpha33 = Alpha +alpha34 = Alpha +alpha35 = Alpha +alpha36 = Alpha +alpha37 = Alph6 +alpha38 = Alpha +alpha39 = Alpha +alpha40 = Alpha +alpha41 = Alpha +alpha42 = Alpha +alpha43 = Alpha +alpha44 = Alph8 +alpha45 = Alpha +alpha46 = Alpha +alpha47 = Alpha +alpha48 = Alpha +alpha49 = Alpha +alpha50 = Alpha +alpha51 = Alpha +alpha52 = Alpha +alpha53 = Alpha +alpha54 = Alpha +alpha55 = Alpha +alpha56 = Alpha +alpha57 = Alph6 +alpha58 = Alpha +alpha59 = Alpha +alpha60 = Alpha +alpha61 = Alpha +alpha62 = Alpha +alpha63 = Alpha +alpha64 = Alpha +alpha65 = Alpha +alpha66 = Alpha +alpha67 = Alph6 +alpha68 = Alpha +alpha69 = Alpha +alpha70 = Alpha +alpha71 = Alpha +alpha72 = Alpha +alpha73 = Alpha +alpha74 = Alpha +alpha75 = Alpha +alpha76 = Alpha +alpha77 = Alph6 +alpha78 = Alpha +alpha79 = Alpha +alpha80 = Alpha +alpha81 = Alpha +alpha82 = Alpha +alpha83 = Alpha +alpha84 = Alpha +alpha85 = Alpha +alpha86 = Alpha +alpha87 = Alpha +alpha88 = Alpha +alpha89 = Alpha +alpha90 = Alpha +alpha91 = Alpha +alpha92 = Alpha +alpha93 = Alpha +alpha94 = Alpha +alpha95 = Alpha +alpha96 = Alpha +alpha97 = Alpha +alpha98 = Alpha +alpha99 = Alpha +alpha101 = Alpha +alpha102 = Alpha +alpha103 = Alpha +alpha104 = Alpha +alpha105 = Alpha +alpha106 = Alpha +alpha107 = Alpha +alpha108 = Alpha +alpha109 = Alpha +alpha110 = Alpha +alpha111 = Alpha +alpha112 = Alpha +alpha113 = Alpha +alpha114 = Alpha +alpha115 = Alpha +alpha116 = Alpha +alpha117 = Alpha +alpha118 = Alpha +alpha119 = Alpha +alpha120 = Alpha +alpha121 = Alpha +alpha122 = Alph5 +alpha123 = Alpha +alpha124 = Alpha +alpha125 = Alpha +alpha126 = Alpha +alpha127 = Alpha +alpha128 = Alpha +alpha129 = Alpha +alpha130 = Alpha +alpha131 = Alpha +alpha132 = Alpha +alpha133 = Alpha +alpha134 = Alpha +alpha135 = Alpha +alpha136 = Alpha +alpha137 = Alph6 +alpha138 = Alpha +alpha139 = Alpha +alpha140 = Alpha +alpha141 = Alpha +alpha142 = Alpha +alpha143 = Alpha +alpha144 = Alph8 +alpha145 = Alpha +alpha146 = Alpha +alpha147 = Alpha +alpha148 = Alpha +alpha149 = Alpha +alpha150 = Alpha +alpha151 = Alpha +alpha152 = Alpha +alpha153 = Alpha +alpha154 = Alpha +alpha155 = Alpha +alpha156 = Alpha +alpha157 = Alph6 +alpha158 = Alpha +alpha159 = Alpha +alpha160 
= Alpha +alpha161 = Alpha +alpha162 = Alpha +alpha163 = Alpha +alpha164 = Alpha +alpha165 = Alpha +alpha166 = Alpha +alpha167 = Alph6 +alpha168 = Alpha +alpha169 = Alpha +alpha170 = Alpha +alpha171 = Alpha +alpha172 = Alpha +alpha173 = Alpha +alpha174 = Alpha +alpha175 = Alpha +alpha176 = Alpha +alpha177 = Alph6 +alpha178 = Alpha +alpha179 = Alpha +alpha180 = Alpha +alpha181 = Alpha +alpha182 = Alpha +alpha183 = Alpha +alpha184 = Alpha +alpha185 = Alpha +alpha186 = Alpha +alpha187 = Alpha +alpha188 = Alpha +alpha189 = Alpha +alpha190 = Alpha +alpha191 = Alpha +alpha192 = Alpha +alpha193 = Alpha +alpha194 = Alpha +alpha195 = Alpha +alpha196 = Alpha +alpha197 = Alpha +alpha198 = Alpha +alpha199 = Alpha +alpha201 = Alpha +alpha202 = Alpha +alpha203 = Alpha +alpha204 = Alpha +alpha205 = Alpha +alpha206 = Alpha +alpha207 = Alpha +alpha208 = Alpha +alpha209 = Alpha +alpha210 = Alpha +alpha211 = Alpha +alpha212 = Alpha +alpha213 = Alpha +alpha214 = Alpha +alpha215 = Alpha +alpha216 = Alpha +alpha217 = Alpha +alpha218 = Alpha +alpha219 = Alpha +alpha220 = Alpha +alpha221 = Alpha +alpha222 = Alph5 +alpha223 = Alpha +alpha224 = Alpha +alpha225 = Alpha +alpha226 = Alpha +alpha227 = Alpha +alpha228 = Alpha +alpha229 = Alpha +alpha230 = Alpha +alpha231 = Alpha +alpha232 = Alpha +alpha233 = Alpha +alpha234 = Alpha +alpha235 = Alpha +alpha236 = Alpha +alpha237 = Alph6 +alpha238 = Alpha +alpha239 = Alpha +alpha240 = Alpha +alpha241 = Alpha +alpha242 = Alpha +alpha243 = Alpha +alpha244 = Alph8 +alpha245 = Alpha +alpha246 = Alpha +alpha247 = Alpha +alpha248 = Alpha +alpha249 = Alpha +alpha250 = Alpha +alpha251 = Alpha +alpha252 = Alpha +alpha253 = Alpha +alpha254 = Alpha +alpha255 = Alpha +alpha256 = Alpha +alpha257 = Alph6 +alpha258 = Alpha +alpha259 = Alpha +alpha260 = Alpha +alpha261 = Alpha +alpha262 = Alpha +alpha263 = Alpha +alpha264 = Alpha +alpha265 = Alpha +alpha266 = Alpha +alpha267 = Alph6 +alpha268 = Alpha +alpha269 = Alpha +alpha270 = Alpha +alpha271 = Alpha +alpha272 = Alpha +alpha273 = Alpha +alpha274 = Alpha +alpha275 = Alpha +alpha276 = Alpha +alpha277 = Alph6 +alpha278 = Alpha +alpha279 = Alpha +alpha280 = Alpha +alpha281 = Alpha +alpha282 = Alpha +alpha283 = Alpha +alpha284 = Alpha +alpha285 = Alpha +alpha286 = Alpha +alpha287 = Alpha +alpha288 = Alpha +alpha289 = Alpha +alpha290 = Alpha +alpha291 = Alpha +alpha292 = Alpha +alpha293 = Alpha +alpha294 = Alpha +alpha295 = Alpha +alpha296 = Alpha +alpha297 = Alpha +alpha298 = Alpha +alpha299 = Alpha +alpha301 = Alpha +alpha302 = Alpha +alpha303 = Alpha +alpha304 = Alpha +alpha305 = Alpha +alpha306 = Alpha +alpha307 = Alpha +alpha308 = Alpha +alpha309 = Alpha +alpha310 = Alpha +alpha311 = Alpha +alpha312 = Alpha +alpha313 = Alpha +alpha314 = Alpha +alpha315 = Alpha +alpha316 = Alpha +alpha317 = Alpha +alpha318 = Alpha +alpha319 = Alpha +alpha320 = Alpha +alpha321 = Alpha +alpha322 = Alph5 +alpha323 = Alpha +alpha324 = Alpha +alpha325 = Alpha +alpha326 = Alpha +alpha327 = Alpha +alpha328 = Alpha +alpha329 = Alpha +alpha330 = Alpha +alpha331 = Alpha +alpha332 = Alpha +alpha333 = Alpha +alpha334 = Alpha +alpha335 = Alpha +alpha336 = Alpha +alpha337 = Alph6 +alpha338 = Alpha +alpha339 = Alpha +alpha340 = Alpha +alpha341 = Alpha +alpha342 = Alpha +alpha343 = Alpha +alpha344 = Alph8 +alpha345 = Alpha +alpha346 = Alpha +alpha347 = Alpha +alpha348 = Alpha +alpha349 = Alpha +alpha350 = Alpha +alpha351 = Alpha +alpha352 = Alpha +alpha353 = Alpha +alpha354 = Alpha +alpha355 = Alpha +alpha356 = Alpha +alpha357 = Alph6 +alpha358 = Alpha +alpha359 = Alpha 
+alpha360 = Alpha +alpha361 = Alpha +alpha362 = Alpha +alpha363 = Alpha +alpha364 = Alpha +alpha365 = Alpha +alpha366 = Alpha +alpha367 = Alph6 +alpha368 = Alpha +alpha639 = Alpha +alpha370 = Alpha +alpha371 = Alpha +alpha372 = Alpha +alpha373 = Alpha +alpha374 = Alpha +alpha375 = Alpha +alpha376 = Alpha +alpha377 = Alph6 +alpha738 = Alpha +alpha379 = Alpha +alpha380 = Alpha +alpha381 = Alpha +alpha382 = Alpha +alpha383 = Alpha +alpha384 = Alpha +alpha385 = Alpha +alpha386 = Alpha +alpha387 = Alpha +alpha388 = Alpha +alpha389 = Alpha +alpha390 = Alpha +alpha91 = Alpha +alph3a92 = Alpha +alph3a93 = Alpha +alph3a94 = Alpha +alph3a95 = Alpha +alph3a96 = Alpha +alph3a97 = Alpha +alph3a98 = Alpha +alpha99 = Alpha +dsadsakdoa = dksadosakfa diff --git a/tests/fixtures/branch_delete_parent_dir.sh b/tests/fixtures/branch_delete_parent_dir.sh new file mode 100755 --- /dev/null +++ b/tests/fixtures/branch_delete_parent_dir.sh @@ -0,0 +1,26 @@ +#!/bin/sh +mkdir temp +cd temp +svnadmin create repo +svn co file://`pwd`/repo wc +cd wc +mkdir branches trunk tags +svn add * +svn ci -m 'btt' +echo foo > trunk/foo +svn add trunk/foo +svn ci -m 'add file' +svn up +svn cp trunk branches/dev_branch +svn ci -m 'branch' +svn up +svn rm branches +svn ci -m 'delete branches dir' +cd .. +cd .. +svnadmin dump temp/repo > branch_delete_parent_dir.svndump +echo +echo 'Complete.' +echo 'You probably want to clean up temp now.' +echo 'Dump in branch_delete_parent_dir.svndump' +exit 0 diff --git a/tests/fixtures/branch_delete_parent_dir.svndump b/tests/fixtures/branch_delete_parent_dir.svndump new file mode 100644 --- /dev/null +++ b/tests/fixtures/branch_delete_parent_dir.svndump @@ -0,0 +1,137 @@ +SVN-fs-dump-format-version: 2 + +UUID: 60132d6f-a460-4b38-8ae6-633264894f73 + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2009-06-30T23:57:20.562207Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 101 +Content-length: 101 + +K 7 +svn:log +V 3 +btt +K 10 +svn:author +V 4 +maxb +K 8 +svn:date +V 27 +2009-06-30T23:57:21.078798Z +PROPS-END + +Node-path: branches +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: tags +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: trunk +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Revision-number: 2 +Prop-content-length: 106 +Content-length: 106 + +K 7 +svn:log +V 8 +add file +K 10 +svn:author +V 4 +maxb +K 8 +svn:date +V 27 +2009-06-30T23:57:22.098826Z +PROPS-END + +Node-path: trunk/foo +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: d3b07384d113edec49eaa6238ad5ff00 +Text-content-sha1: f1d2d2f924e986ac86fdf7b36c94bcdf32beec15 +Content-length: 14 + +PROPS-END +foo + + +Revision-number: 3 +Prop-content-length: 104 +Content-length: 104 + +K 7 +svn:log +V 6 +branch +K 10 +svn:author +V 4 +maxb +K 8 +svn:date +V 27 +2009-06-30T23:57:25.055724Z +PROPS-END + +Node-path: branches/dev_branch +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 2 +Node-copyfrom-path: trunk + + +Revision-number: 4 +Prop-content-length: 118 +Content-length: 118 + +K 7 +svn:log +V 19 +delete branches dir +K 10 +svn:author +V 4 +maxb +K 8 +svn:date +V 27 +2009-06-30T23:57:27.072926Z +PROPS-END + +Node-path: branches +Node-action: delete + + diff --git a/tests/fixtures/commit-to-tag.sh b/tests/fixtures/commit-to-tag.sh new file mode 100755 --- /dev/null 
+++ b/tests/fixtures/commit-to-tag.sh @@ -0,0 +1,74 @@ +#!/bin/sh +mkdir temp +cd temp +svnadmin create repo +REPOPATH="file://`pwd`/repo" +svn co $REPOPATH wc +cd wc +mkdir -p branches/magic trunk tags +svn add * +svn ci -m 'btt' +cd branches/magic +for a in alpha beta gamma; do + echo $a > $a + svn add $a + svn ci -m "Add file $a" +done +cd ../.. +svn up +svn cp $REPOPATH/branches/magic $REPOPATH/tags/will-edit -m 'Make tag to edit' +svn up + +cd branches/magic +for a in delta iota lambda; do + echo $a > $a + svn add $a + svn ci -m "Add file $a" +done +cd ../.. + +cd tags/will-edit +svn rm alpha +svn ci -m 'removed alpha on a tag. Moves tag, implicit branch.' +cd ../.. + +cd branches/magic +for a in omega; do + echo $a > $a + svn add $a + svn ci -m "Add file $a" +done +cd ../.. +svn up +svn cp $REPOPATH/branches/magic $REPOPATH/tags/also-edit -m 'Make tag to edit' +svn up + +echo not omega > branches/magic/omega +echo not omega > tags/also-edit/omega +svn ci -m 'edit both the tag and its source branch at the same time' + +echo more stupidity > tags/also-edit/omega +svn ci -m 'Edit an edited tag.' + +svn cp $REPOPATH/tags/also-edit $REPOPATH/tags/did-edits -m 'Tag an edited tag' + +svn cp $REPOPATH/branches/magic $REPOPATH/branches/closeme -m 'Make extra branch for another bogus case' +svn cp $REPOPATH/branches/closeme $REPOPATH/tags/edit-later -m 'Make tag to edit after branch closes' +svn rm $REPOPATH/branches/closeme -m 'Close the branch' +svn up +echo boofar > tags/edit-later/delta +svn ci -m 'Edit this tag after its parent closed' + +# try and revert will-edit to its original state +svn up +svn merge -r9:8 $REPOPATH . +svn ci -m 'Revert revision 9.' + + +cd ../.. +svnadmin dump temp/repo > commit-to-tag.svndump +echo +echo 'Complete.' +echo 'You probably want to clean up temp now.' 
+echo 'Dump in commit-to-tag.svndump' +exit 0 diff --git a/tests/fixtures/commit-to-tag.svndump b/tests/fixtures/commit-to-tag.svndump new file mode 100644 --- /dev/null +++ b/tests/fixtures/commit-to-tag.svndump @@ -0,0 +1,565 @@ +SVN-fs-dump-format-version: 2 + +UUID: af82cc90-c2d2-43cd-b1aa-c8a78449440a + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2009-06-24T02:53:15.860217Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 102 +Content-length: 102 + +K 7 +svn:log +V 3 +btt +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:17.530891Z +PROPS-END + +Node-path: branches +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: branches/magic +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: tags +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: trunk +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Revision-number: 2 +Prop-content-length: 114 +Content-length: 114 + +K 7 +svn:log +V 14 +Add file alpha +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:18.122469Z +PROPS-END + +Node-path: branches/magic/alpha +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 9f9f90dbe3e5ee1218c86b8839db1995 +Content-length: 16 + +PROPS-END +alpha + + +Revision-number: 3 +Prop-content-length: 113 +Content-length: 113 + +K 7 +svn:log +V 13 +Add file beta +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:19.101676Z +PROPS-END + +Node-path: branches/magic/beta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 5 +Text-content-md5: f0cf2a92516045024a0c99147b28f05b +Content-length: 15 + +PROPS-END +beta + + +Revision-number: 4 +Prop-content-length: 114 +Content-length: 114 + +K 7 +svn:log +V 14 +Add file gamma +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:20.081433Z +PROPS-END + +Node-path: branches/magic/gamma +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 303febb9068384eca46b5b6516843b35 +Content-length: 16 + +PROPS-END +gamma + + +Revision-number: 5 +Prop-content-length: 116 +Content-length: 116 + +K 7 +svn:log +V 16 +Make tag to edit +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:22.103708Z +PROPS-END + +Node-path: tags/will-edit +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 4 +Node-copyfrom-path: branches/magic + + +Revision-number: 6 +Prop-content-length: 114 +Content-length: 114 + +K 7 +svn:log +V 14 +Add file delta +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:23.077083Z +PROPS-END + +Node-path: branches/magic/delta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: d2840cc81bc032bd1141b56687d0f93c +Content-length: 16 + +PROPS-END +delta + + +Revision-number: 7 +Prop-content-length: 113 +Content-length: 113 + +K 7 +svn:log +V 13 +Add file iota +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:24.107494Z +PROPS-END + +Node-path: branches/magic/iota +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 5 +Text-content-md5: ebcf3971120220589f1dfbf8d56e25b9 +Content-length: 15 + +PROPS-END +iota + + +Revision-number: 8 +Prop-content-length: 115 +Content-length: 115 + +K 7 +svn:log +V 15 
+Add file lambda +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:25.092428Z +PROPS-END + +Node-path: branches/magic/lambda +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 7 +Text-content-md5: 8c8a4646591ee0d9a43d3149320ed577 +Content-length: 17 + +PROPS-END +lambda + + +Revision-number: 9 +Prop-content-length: 151 +Content-length: 151 + +K 7 +svn:log +V 51 +removed alpha on a tag. Moves tag, implicit branch. +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:26.443673Z +PROPS-END + +Node-path: tags/will-edit/alpha +Node-action: delete + + +Revision-number: 10 +Prop-content-length: 114 +Content-length: 114 + +K 7 +svn:log +V 14 +Add file omega +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:27.067556Z +PROPS-END + +Node-path: branches/magic/omega +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 14723c69541ee556d75c581b787dc217 +Content-length: 16 + +PROPS-END +omega + + +Revision-number: 11 +Prop-content-length: 116 +Content-length: 116 + +K 7 +svn:log +V 16 +Make tag to edit +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:29.049217Z +PROPS-END + +Node-path: tags/also-edit +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 10 +Node-copyfrom-path: branches/magic + + +Revision-number: 12 +Prop-content-length: 156 +Content-length: 156 + +K 7 +svn:log +V 56 +edit both the tag and its source branch at the same time +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-24T02:53:30.063026Z +PROPS-END + +Node-path: branches/magic/omega +Node-kind: file +Node-action: change +Text-content-length: 10 +Text-content-md5: 9b26a47955b0778e131aae04743f2b8c +Content-length: 10 + +not omega + + +Node-path: tags/also-edit/omega +Node-kind: file +Node-action: change +Text-content-length: 10 +Text-content-md5: 9b26a47955b0778e131aae04743f2b8c +Content-length: 10 + +not omega + + +Revision-number: 13 +Prop-content-length: 119 +Content-length: 119 + +K 7 +svn:log +V 19 +Edit an edited tag. 
+K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-26T15:28:20.055574Z +PROPS-END + +Node-path: tags/also-edit/omega +Node-kind: file +Node-action: change +Text-content-length: 15 +Text-content-md5: a8d56f18cc28a34d6fe2cee5291ac1cc +Content-length: 15 + +more stupidity + + +Revision-number: 14 +Prop-content-length: 117 +Content-length: 117 + +K 7 +svn:log +V 17 +Tag an edited tag +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-26T15:57:06.047408Z +PROPS-END + +Node-path: tags/did-edits +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 13 +Node-copyfrom-path: tags/also-edit + + +Revision-number: 15 +Prop-content-length: 140 +Content-length: 140 + +K 7 +svn:log +V 40 +Make extra branch for another bogus case +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-26T19:26:28.086924Z +PROPS-END + +Node-path: branches/closeme +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 14 +Node-copyfrom-path: branches/magic + + +Revision-number: 16 +Prop-content-length: 136 +Content-length: 136 + +K 7 +svn:log +V 36 +Make tag to edit after branch closes +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-26T19:26:28.119751Z +PROPS-END + +Node-path: tags/edit-later +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 15 +Node-copyfrom-path: branches/closeme + + +Revision-number: 17 +Prop-content-length: 116 +Content-length: 116 + +K 7 +svn:log +V 16 +Close the branch +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-29T00:12:57.367624Z +PROPS-END + +Node-path: branches/closeme +Node-action: delete + + +Revision-number: 18 +Prop-content-length: 137 +Content-length: 137 + +K 7 +svn:log +V 37 +Edit this tag after its parent closed +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-26T19:26:29.059216Z +PROPS-END + +Node-path: tags/edit-later/delta +Node-kind: file +Node-action: change +Text-content-length: 7 +Text-content-md5: 5bbd00dab68c937673171d0b2e205c96 +Content-length: 7 + +boofar + + +Revision-number: 19 +Prop-content-length: 118 +Content-length: 118 + +K 7 +svn:log +V 18 +Revert revision 9. +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-06-29T00:13:01.537589Z +PROPS-END + +Node-path: tags/will-edit/alpha +Node-kind: file +Node-action: add +Node-copyfrom-rev: 8 +Node-copyfrom-path: tags/will-edit/alpha +Text-copy-source-md5: 9f9f90dbe3e5ee1218c86b8839db1995 diff --git a/tests/fixtures/copybeforeclose.sh b/tests/fixtures/copybeforeclose.sh new file mode 100755 --- /dev/null +++ b/tests/fixtures/copybeforeclose.sh @@ -0,0 +1,39 @@ +#!/bin/sh + +mkdir temp +cd temp +svnadmin create repo +svn co file://`pwd`/repo wc +cd wc +mkdir branches trunk tags +svn add * +svn ci -m 'btt' +cd trunk + +echo a > a +svn add a +svn ci -m 'Add file.' +svn up + +cd .. +svn cp trunk branches/test +svn ci -m 'Branch.' +svn up + +cd branches/test/ +svn mv a b +svn ci -m 'Move on branch.' +svn up + +cd ../../ +svn up +svn rm branches/test +svn ci -m 'Close branch.' + +cd ../.. +svnadmin dump temp/repo > copybeforeclose.svndump +echo +echo 'Complete.' +echo 'You probably want to clean up temp now.' 
+echo 'Dump in copybeforeclose.svndump' +exit 0 diff --git a/tests/fixtures/copybeforeclose.svndump b/tests/fixtures/copybeforeclose.svndump new file mode 100644 --- /dev/null +++ b/tests/fixtures/copybeforeclose.svndump @@ -0,0 +1,166 @@ +SVN-fs-dump-format-version: 2 + +UUID: 1e1f7d3f-4361-4205-84f8-c0d471d161d2 + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2009-06-11T15:51:46.768965Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 100 +Content-length: 100 + +K 7 +svn:log +V 3 +btt +K 10 +svn:author +V 3 +djc +K 8 +svn:date +V 27 +2009-06-11T15:51:47.134555Z +PROPS-END + +Node-path: branches +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: tags +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: trunk +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Revision-number: 2 +Prop-content-length: 106 +Content-length: 106 + +K 7 +svn:log +V 9 +Add file. +K 10 +svn:author +V 3 +djc +K 8 +svn:date +V 27 +2009-06-11T15:51:48.129578Z +PROPS-END + +Node-path: trunk/a +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 2 +Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 +Content-length: 12 + +PROPS-END +a + + +Revision-number: 3 +Prop-content-length: 104 +Content-length: 104 + +K 7 +svn:log +V 7 +Branch. +K 10 +svn:author +V 3 +djc +K 8 +svn:date +V 27 +2009-06-11T15:51:51.120532Z +PROPS-END + +Node-path: branches/test +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 2 +Node-copyfrom-path: trunk + + +Revision-number: 4 +Prop-content-length: 113 +Content-length: 113 + +K 7 +svn:log +V 15 +Move on branch. +K 10 +svn:author +V 3 +djc +K 8 +svn:date +V 27 +2009-06-11T15:51:54.124503Z +PROPS-END + +Node-path: branches/test/b +Node-kind: file +Node-action: add +Node-copyfrom-rev: 3 +Node-copyfrom-path: branches/test/a +Text-copy-source-md5: 60b725f10c9c85c70d97880dfe8191b3 + + +Node-path: branches/test/a +Node-action: delete + + +Revision-number: 5 +Prop-content-length: 111 +Content-length: 111 + +K 7 +svn:log +V 13 +Close branch. +K 10 +svn:author +V 3 +djc +K 8 +svn:date +V 27 +2009-06-11T15:51:57.130547Z +PROPS-END + +Node-path: branches/test +Node-action: delete + + diff --git a/tests/fixtures/unusual_tags.sh b/tests/fixtures/most-recent-is-edit-tag.sh copy from tests/fixtures/unusual_tags.sh copy to tests/fixtures/most-recent-is-edit-tag.sh --- a/tests/fixtures/unusual_tags.sh +++ b/tests/fixtures/most-recent-is-edit-tag.sh @@ -27,16 +27,15 @@ svn add iota svn ci -m 'branch changes' cd ../.. svn up -svn cp branches/dev_branch tags/versions/branch_version -svn ci -m 'Make a tag in tags/versions from branches/dev_branch' -svn up -svn cp trunk tags/blah/trunktag -svn ci -m 'Make a tag in tags/blah from trunk' +svn cp branches/dev_branch tags/some-tag +svn ci -m 'Make a tag.' svn up +echo foo > tags/some-tag/alpha +svn ci -m 'edit that tag' cd ../.. -svnadmin dump temp/repo > unusual_tags.svndump +svnadmin dump temp/repo > most-recent-is-edit-tag.svndump echo echo 'Complete.' echo 'You probably want to clean up temp now.' 
-echo 'Dump in unusual_tags.svndump' +echo 'Dump in most-recent-is-edit-tag.svndump' exit 0 diff --git a/tests/fixtures/most-recent-is-edit-tag.svndump b/tests/fixtures/most-recent-is-edit-tag.svndump new file mode 100644 --- /dev/null +++ b/tests/fixtures/most-recent-is-edit-tag.svndump @@ -0,0 +1,265 @@ +SVN-fs-dump-format-version: 2 + +UUID: 65efcde9-3b35-4f89-9c6b-23da1cf93d9b + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2009-07-19T03:04:55.657240Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 102 +Content-length: 102 + +K 7 +svn:log +V 3 +btt +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-07-19T03:04:56.082834Z +PROPS-END + +Node-path: branches +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: tags +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: tags/blah +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: tags/versions +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: trunk +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Revision-number: 2 +Prop-content-length: 110 +Content-length: 110 + +K 7 +svn:log +V 10 +Add files. +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-07-19T03:04:57.109656Z +PROPS-END + +Node-path: trunk/alpha +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 9f9f90dbe3e5ee1218c86b8839db1995 +Content-length: 16 + +PROPS-END +alpha + + +Node-path: trunk/beta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 5 +Text-content-md5: f0cf2a92516045024a0c99147b28f05b +Content-length: 15 + +PROPS-END +beta + + +Node-path: trunk/delta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: d2840cc81bc032bd1141b56687d0f93c +Content-length: 16 + +PROPS-END +delta + + +Node-path: trunk/gamma +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 303febb9068384eca46b5b6516843b35 +Content-length: 16 + +PROPS-END +gamma + + +Revision-number: 3 +Prop-content-length: 105 +Content-length: 105 + +K 7 +svn:log +V 6 +branch +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-07-19T03:05:00.048169Z +PROPS-END + +Node-path: branches/dev_branch +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 2 +Node-copyfrom-path: trunk + + +Revision-number: 4 +Prop-content-length: 114 +Content-length: 114 + +K 7 +svn:log +V 14 +branch changes +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-07-19T03:05:01.081946Z +PROPS-END + +Node-path: branches/dev_branch/alpha +Node-kind: file +Node-action: change +Text-content-length: 5 +Text-content-md5: 5e723ed52db2000686425ca28bc5ba4a +Content-length: 5 + +narf + + +Node-path: branches/dev_branch/iota +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 5 +Text-content-md5: ebcf3971120220589f1dfbf8d56e25b9 +Content-length: 15 + +PROPS-END +iota + + +Node-path: branches/dev_branch/delta +Node-action: delete + + +Revision-number: 5 +Prop-content-length: 111 +Content-length: 111 + +K 7 +svn:log +V 11 +Make a tag. 
+K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-07-19T03:05:04.056268Z +PROPS-END + +Node-path: tags/some-tag +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 4 +Node-copyfrom-path: branches/dev_branch + + +Revision-number: 6 +Prop-content-length: 113 +Content-length: 113 + +K 7 +svn:log +V 13 +edit that tag +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-07-19T03:05:06.057723Z +PROPS-END + +Node-path: tags/some-tag/alpha +Node-kind: file +Node-action: change +Text-content-length: 4 +Text-content-md5: d3b07384d113edec49eaa6238ad5ff00 +Content-length: 4 + +foo + + diff --git a/tests/fixtures/tag_name_same_as_branch.sh b/tests/fixtures/tag_name_same_as_branch.sh new file mode 100755 --- /dev/null +++ b/tests/fixtures/tag_name_same_as_branch.sh @@ -0,0 +1,29 @@ +#!/bin/sh +mkdir temp +cd temp +svnadmin create repo +REPOPATH="file://`pwd`/repo" +svn co $REPOPATH wc +cd wc +mkdir -p branches/magic trunk tags +svn add * +svn ci -m 'btt' +cd branches/magic +for a in alpha beta gamma delta iota zeta eta theta ; do + echo $a > $a + svn add $a + svn ci -m "Add file $a" +done +cd ../.. +svn up +svn cp $REPOPATH/branches/magic $REPOPATH/tags/magic -m 'Make magic tag' +svn rm $REPOPATH/branches/magic/theta -m 'remove a file' +svn cp $REPOPATH/branches/magic $REPOPATH/tags/magic2 -m 'Tag magic again' + +cd ../.. +svnadmin dump temp/repo > tag_name_same_as_branch.svndump +echo +echo 'Complete.' +echo 'You probably want to clean up temp now.' +echo 'Dump in tag_name_same_as_branch.svndump' +exit 0 diff --git a/tests/fixtures/tag_name_same_as_branch.svndump b/tests/fixtures/tag_name_same_as_branch.svndump new file mode 100644 --- /dev/null +++ b/tests/fixtures/tag_name_same_as_branch.svndump @@ -0,0 +1,388 @@ +SVN-fs-dump-format-version: 2 + +UUID: 8ede5731-e772-49cc-a297-c19c9844b692 + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2009-06-01T16:09:33.850620Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 105 +Content-length: 105 + +K 7 +svn:log +V 3 +btt +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:34.084216Z +PROPS-END + +Node-path: branches +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: branches/magic +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: tags +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: trunk +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Revision-number: 2 +Prop-content-length: 117 +Content-length: 117 + +K 7 +svn:log +V 14 +Add file alpha +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:35.078795Z +PROPS-END + +Node-path: branches/magic/alpha +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 9f9f90dbe3e5ee1218c86b8839db1995 +Text-content-sha1: d046cd9b7ffb7661e449683313d41f6fc33e3130 +Content-length: 16 + +PROPS-END +alpha + + +Revision-number: 3 +Prop-content-length: 116 +Content-length: 116 + +K 7 +svn:log +V 13 +Add file beta +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:36.385254Z +PROPS-END + +Node-path: branches/magic/beta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 5 +Text-content-md5: f0cf2a92516045024a0c99147b28f05b +Text-content-sha1: 6c007a14875d53d9bf0ef5a6fc0257c817f0fb83 +Content-length: 15 + +PROPS-END +beta + 
+ +Revision-number: 4 +Prop-content-length: 117 +Content-length: 117 + +K 7 +svn:log +V 14 +Add file gamma +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:37.081077Z +PROPS-END + +Node-path: branches/magic/gamma +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 303febb9068384eca46b5b6516843b35 +Text-content-sha1: 37f385b028bf2f93a4b497ca9ff44eea63945b7f +Content-length: 16 + +PROPS-END +gamma + + +Revision-number: 5 +Prop-content-length: 117 +Content-length: 117 + +K 7 +svn:log +V 14 +Add file delta +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:38.071461Z +PROPS-END + +Node-path: branches/magic/delta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: d2840cc81bc032bd1141b56687d0f93c +Text-content-sha1: 4bd6315d6d7824c4e376847ca7d116738ad2f29a +Content-length: 16 + +PROPS-END +delta + + +Revision-number: 6 +Prop-content-length: 116 +Content-length: 116 + +K 7 +svn:log +V 13 +Add file iota +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:39.073363Z +PROPS-END + +Node-path: branches/magic/iota +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 5 +Text-content-md5: ebcf3971120220589f1dfbf8d56e25b9 +Text-content-sha1: 47e9aceee5149402971cda8590e9b912c1b1053e +Content-length: 15 + +PROPS-END +iota + + +Revision-number: 7 +Prop-content-length: 116 +Content-length: 116 + +K 7 +svn:log +V 13 +Add file zeta +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:40.082055Z +PROPS-END + +Node-path: branches/magic/zeta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 5 +Text-content-md5: 2db8f255a13ae1e49099d9dad57b4a37 +Text-content-sha1: 2c5ea36ead157ee3089bcd883f26ea2c899b2521 +Content-length: 15 + +PROPS-END +zeta + + +Revision-number: 8 +Prop-content-length: 115 +Content-length: 115 + +K 7 +svn:log +V 12 +Add file eta +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:41.072675Z +PROPS-END + +Node-path: branches/magic/eta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: 655d1abbe3e97298b0948ba964ad2522 +Text-content-sha1: 3c106a7ec1fe510152eebcf2cc76e135cca63f62 +Content-length: 14 + +PROPS-END +eta + + +Revision-number: 9 +Prop-content-length: 117 +Content-length: 117 + +K 7 +svn:log +V 14 +Add file theta +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:42.080715Z +PROPS-END + +Node-path: branches/magic/theta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 9485edf7b8ec0fdf0bc3bdb9d5fc6c28 +Text-content-sha1: dde607ddc995205a6a521f47511bd04fa506e286 +Content-length: 16 + +PROPS-END +theta + + +Revision-number: 10 +Prop-content-length: 117 +Content-length: 117 + +K 7 +svn:log +V 14 +Make magic tag +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:44.048672Z +PROPS-END + +Node-path: tags/magic +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 9 +Node-copyfrom-path: branches/magic + + +Revision-number: 11 +Prop-content-length: 116 +Content-length: 116 + +K 7 +svn:log +V 13 +remove a file +K 10 +svn:author +V 8 +afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:44.080801Z +PROPS-END + +Node-path: branches/magic/theta +Node-action: delete + + +Revision-number: 12 +Prop-content-length: 118 +Content-length: 118 + +K 7 +svn:log +V 15 +Tag magic again +K 10 +svn:author +V 8 
+afackler +K 8 +svn:date +V 27 +2009-06-01T16:09:44.115205Z +PROPS-END + +Node-path: tags/magic2 +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 11 +Node-copyfrom-path: branches/magic + + diff --git a/tests/run.py b/tests/run.py --- a/tests/run.py +++ b/tests/run.py @@ -25,6 +25,15 @@ import test_tags import test_utility_commands import test_urls +sys.path.append(os.path.join(os.path.dirname(__file__), 'comprehensive')) + +import test_stupid_pull +import test_verify + +def comprehensive(mod): + dir = os.path.basename(os.path.dirname(mod.__file__)) + return dir == 'comprehensive' + if __name__ == '__main__': kwargs = {'descriptions': 2} @@ -43,12 +52,14 @@ if __name__ == '__main__': args = [i.split('.py')[0].replace('-', '_') for i in args] if not args: - suite = [i[1].suite() for i in sorted(all.iteritems())] + check = lambda x: '-A' in sys.argv or not comprehensive(x) + mods = [m for (n, m) in sorted(all.iteritems()) if check(m)] + suite = [m.suite() for m in mods] else: suite = [] for arg in args: if arg not in all: - print 'test module %s not available' % arg + print >> sys.stderr, 'test module %s not available' % arg else: suite.append(all[arg].suite()) diff --git a/tests/test_fetch_branches.py b/tests/test_fetch_branches.py --- a/tests/test_fetch_branches.py +++ b/tests/test_fetch_branches.py @@ -22,9 +22,9 @@ class TestFetchBranches(test_util.TestBa return hg.repository(ui.ui(), self.wc_path) def openbranches(self, repo): - hctxs = [repo[hn] for hn in repo.heads(closed=False)] - branches = set(ctx.branch() for ctx in hctxs) - branches.discard('closed-branches') + hctxs = [repo[hn] for hn in repo.heads()] + branches = set(ctx.branch() for ctx in hctxs if + ctx.extra().get('close', None) != '1') return sorted(branches) def test_unrelatedbranch(self, stupid=False): @@ -68,6 +68,15 @@ class TestFetchBranches(test_util.TestBa self.assertEqual(repo['tip'].extra().get('close'), '1') self.assertEqual(self.openbranches(repo), ['default']) + def test_copybeforeclose(self, stupid=False): + repo = self._load_fixture_and_fetch('copybeforeclose.svndump', stupid) + self.assertEqual(repo['tip'].branch(), 'test') + self.assertEqual(repo['test'].extra().get('close'), '1') + self.assertEqual(repo['test']['b'].data(), 'a\n') + + def test_copybeforeclose_stupid(self): + self.test_copybeforeclose(True) + def test_replace_trunk_with_branch_stupid(self): self.test_replace_trunk_with_branch(stupid=True) @@ -107,6 +116,12 @@ class TestFetchBranches(test_util.TestBa def test_branches_weird_moves_stupid(self): self.test_branches_weird_moves(True) + def test_branch_delete_parent_dir(self, stupid=False): + repo = self._load_fixture_and_fetch('branch_delete_parent_dir.svndump', + stupid) + self.assertEqual(node.hex(repo['tip'].node()), + '4108a81a82c7925d5551091165dc54c41b06a8a8') + def suite(): all = [unittest.TestLoader().loadTestsFromTestCase(TestFetchBranches), ] diff --git a/tests/test_fetch_mappings.py b/tests/test_fetch_mappings.py --- a/tests/test_fetch_mappings.py +++ b/tests/test_fetch_mappings.py @@ -9,6 +9,8 @@ from mercurial import node import test_util +from hgsubversion import maps + class MapTests(test_util.TestBase): @property def authors(self): @@ -55,6 +57,18 @@ class MapTests(test_util.TestBase): def test_author_map_closing_author_stupid(self): self.test_author_map_closing_author(True) + def test_author_map_no_overwrite(self): + cwd = os.path.dirname(__file__) + orig = os.path.join(cwd, 'fixtures', 'author-map-test.txt') + new = open(self.authors, 'w') + new.write(open(orig).read()) + 
new.close() + test = maps.AuthorMap(ui.ui(), self.authors) + fromself = set(test) + test.load(orig) + all = set(test) + self.assertEqual(fromself.symmetric_difference(all), set()) + def test_file_map(self, stupid=False): test_util.load_svndump_fixture(self.repo_path, 'replace_trunk_with_branch.svndump') filemap = open(self.filemap, 'w') diff --git a/tests/test_push_command.py b/tests/test_push_command.py --- a/tests/test_push_command.py +++ b/tests/test_push_command.py @@ -50,6 +50,31 @@ class PushTests(test_util.TestBase): tip = self.repo['tip'] self.assertEqual(tip.node(), old_tip) + def test_cant_push_with_changes(self): + repo = self.repo + def file_callback(repo, memctx, path): + return context.memfilectx( + path=path, data='foo', islink=False, + isexec=False, copied=False) + ctx = context.memctx(repo, + (repo['default'].node(), node.nullid), + 'automated test', + ['adding_file'], + file_callback, + 'an_author', + '2008-10-07 20:59:48 -0500', + {'branch': 'default',}) + new_hash = repo.commitctx(ctx) + hg.update(repo, repo['tip'].node()) + # Touch an existing file + repo.wwrite('beta', 'something else', '') + try: + self.pushrevisions() + except hgutil.Abort: + pass + tip = self.repo['tip'] + self.assertEqual(new_hash, tip.node()) + def test_push_over_svnserve(self, commit=True): test_util.load_svndump_fixture(self.repo_path, 'simple_branch.svndump') open(os.path.join(self.repo_path, 'conf', 'svnserve.conf'), diff --git a/tests/test_rebuildmeta.py b/tests/test_rebuildmeta.py --- a/tests/test_rebuildmeta.py +++ b/tests/test_rebuildmeta.py @@ -8,7 +8,7 @@ from mercurial import hg from mercurial import ui from hgsubversion import svncommands -from hgsubversion import hg_delta_editor +from hgsubversion import svnmeta def _do_case(self, name, stupid): subdir = test_util.subdir.get(name, '') @@ -27,29 +27,29 @@ def _do_case(self, name, stupid): self.assertTrue(os.path.isdir(os.path.join(src.path, 'svn')), 'no .hg/svn directory in the destination!') dest = hg.repository(u, os.path.dirname(dest.path)) - for tf in ('rev_map', 'uuid'): + for tf in ('rev_map', 'uuid', 'tagmap', ): stf = os.path.join(src.path, 'svn', tf) self.assertTrue(os.path.isfile(stf), '%r is missing!' % stf) dtf = os.path.join(dest.path, 'svn', tf) self.assertTrue(os.path.isfile(dtf), '%r is missing!' % tf) - self.assertEqual(open(stf).read(), - open(dtf).read()) - self.assertEqual(src.branchtags(), dest.branchtags()) + old, new = open(stf).read(), open(dtf).read() + # uncomment next line for easy-ish debugging. 
+ # os.system('diff -u %s %s' % (stf, dtf)) + self.assertEqual(old, new) + self.assertEqual(src.branchtags(), dest.branchtags()) srcbi = pickle.load(open(os.path.join(src.path, 'svn', 'branch_info'))) destbi = pickle.load(open(os.path.join(dest.path, 'svn', 'branch_info'))) self.assertEqual(sorted(srcbi.keys()), sorted(destbi.keys())) + revkeys = svnmeta.SVNMeta(dest).revmap.keys() for branch in destbi: srcinfo = srcbi[branch] destinfo = destbi[branch] - hge = hg_delta_editor.HgChangeReceiver(path=os.path.dirname(dest.path), - repo=dest, - ui_=u) - if destinfo[:2] == (None, 0): + if srcinfo[:2] == (None, 0) or destinfo[:2] == (None, 0): self.assert_(srcinfo[2] <= destinfo[2]) self.assertEqual(srcinfo[0], destinfo[0]) else: pr = sorted(filter(lambda x: x[1] == srcinfo[0] and x[0] <= srcinfo[1], - hge.revmap.keys()), reverse=True)[0][0] + revkeys), reverse=True)[0][0] self.assertEqual(pr, destinfo[1]) self.assertEqual(srcinfo[2], destinfo[2]) diff --git a/tests/test_tags.py b/tests/test_tags.py --- a/tests/test_tags.py +++ b/tests/test_tags.py @@ -1,11 +1,14 @@ +import os import unittest +from mercurial import commands from mercurial import hg from mercurial import node from mercurial import ui import test_util +from hgsubversion import svncommands from hgsubversion import svnrepo class TestTags(test_util.TestBase): @@ -54,9 +57,9 @@ class TestTags(test_util.TestBase): def test_tag_by_renaming_branch(self, stupid=False): repo = self._load_fixture_and_fetch('tag_by_rename_branch.svndump', stupid=stupid) - branches = set(repo[h] for h in repo.heads(closed=False)) + branches = set(repo[h] for h in repo.heads()) self.assert_('dummy' not in branches) - self.assertEqual(repo['dummy'], repo['tip'].parents()[0].parents()[0]) + self.assertEqual(repo['dummy'], repo['tip'].parents()[0]) extra = repo['tip'].extra().copy() extra.pop('convert_revision', None) self.assertEqual(extra, {'branch': 'dummy', 'close': '1'}) @@ -66,7 +69,7 @@ class TestTags(test_util.TestBase): def test_deletion_of_tag_on_trunk_after_branching(self): repo = self._load_fixture_and_fetch('tag_deletion_tag_branch.svndump') - branches = set(repo[h].extra()['branch'] for h in repo.heads(closed=False)) + branches = set(repo[h].extra()['branch'] for h in repo.heads()) self.assertEqual(branches, set(['default', 'from_2', ])) self.assertEqual( repo.tags(), @@ -76,7 +79,7 @@ class TestTags(test_util.TestBase): def test_tags_in_unusual_location(self): repo = self._load_fixture_and_fetch('unusual_tags.svndump') branches = set(repo[h].extra()['branch'] - for h in repo.heads(closed=False)) + for h in repo.heads()) self.assertEqual(branches, set(['default', 'dev_branch'])) tags = repo.tags() del tags['tip'] @@ -86,6 +89,105 @@ class TestTags(test_util.TestBase): 'versions/branch_version': 'I\x89\x1c>z#\xfc._K#@:\xd6\x1f\x96\xd6\x83\x1b|', }) + def test_most_recent_is_edited_stupid(self): + self.test_most_recent_is_edited(True) + + def test_most_recent_is_edited(self, stupid=False): + repo = self._load_fixture_and_fetch('most-recent-is-edit-tag.svndump', + stupid=stupid) + self.repo.ui.status( + "Note: this test failing may be because of a rebuildmeta failure.\n" + "You should check that before assuming issues with this test.\n") + wc2_path = self.wc_path + '2' + src, dest = hg.clone(repo.ui, self.wc_path, wc2_path, update=False) + svncommands.rebuildmeta(repo.ui, + dest, + os.path.dirname(dest.path), + args=[test_util.fileurl(self.repo_path), ]) + commands.pull(self.repo.ui, self.repo, stupid=stupid) + dtags, srctags = dest.tags(), 
self.repo.tags() + dtags.pop('tip') + srctags.pop('tip') + self.assertEqual(dtags, srctags) + self.assertEqual(dest.heads(), self.repo.heads()) + + def test_edited_tag_stupid(self): + self.test_edited_tag(True) + + def test_edited_tag(self, stupid=False): + repo = self._load_fixture_and_fetch('commit-to-tag.svndump', + stupid=stupid) + self.assertEqual(len(repo.heads()), 5) + heads = repo.heads() + openheads = [h for h in heads if not repo[h].extra().get('close', False)] + closedheads = set(heads) - set(openheads) + self.assertEqual(len(openheads), 1) + self.assertEqual(len(closedheads), 4) + closedheads = sorted(list(closedheads), + cmp=lambda x,y: cmp(repo[x].rev(), repo[y].rev())) + + # closeme has no open heads + for h in openheads: + self.assertNotEqual('closeme', repo[openheads[0]].branch()) + + self.assertEqual(1, len(self.repo.branchheads('magic'))) + + alsoedit, editlater, closeme, willedit, = closedheads + self.assertEqual( + repo[willedit].extra(), + {'close': '1', + 'branch': 'magic', + 'convert_revision': 'svn:af82cc90-c2d2-43cd-b1aa-c8a78449440a/tags/will-edit@19'}) + self.assertEqual(willedit, repo.tags()['will-edit']) + self.assertEqual(repo['will-edit'].manifest().keys(), ['alpha', + 'beta', + 'gamma', + ]) + self.assertEqual( + repo[alsoedit].extra(), + {'close': '1', + 'branch': 'magic', + 'convert_revision': 'svn:af82cc90-c2d2-43cd-b1aa-c8a78449440a/tags/also-edit@14'}) + self.assertEqual(repo[alsoedit].parents()[0].node(), repo.tags()['also-edit']) + self.assertEqual(repo['also-edit'].manifest().keys(), + ['beta', + '.hgtags', + 'delta', + 'alpha', + 'omega', + 'iota', + 'gamma', + 'lambda', + ]) + + self.assertEqual(editlater, repo['edit-later'].node()) + self.assertEqual( + repo[closeme].extra(), + {'close': '1', + 'branch': 'closeme', + 'convert_revision': 'svn:af82cc90-c2d2-43cd-b1aa-c8a78449440a/branches/closeme@17'}) + + def test_tags_in_unusual_location(self): + repo = self._load_fixture_and_fetch('tag_name_same_as_branch.svndump') + self.assertEqual(len(repo.heads()), 1) + branches = set(repo[h].extra()['branch'] + for h in repo.heads()) + self.assertEqual(branches, set(['magic', ])) + tags = repo.tags() + del tags['tip'] + self.assertEqual( + tags, + {'magic': '\xa2b\xb9\x03\xc6\xbd\x903\x95\xf5\x0f\x94\xcey\xc4E\xfaE6\xaa', + 'magic2': '\xa3\xa2D\x86aM\xc0v\xb9\xb0\x18\x14\xad\xacwBUi}\xe2', + }) + + def test_old_tag_map_rebuilds(self): + repo = self._load_fixture_and_fetch('tag_name_same_as_branch.svndump') + tm = os.path.join(repo.path, 'svn', 'tagmap') + open(tm, 'w').write('1\n') + commands.pull(repo.ui, repo) + self.assertEqual(open(tm).read().splitlines()[0], '2') + def suite(): return unittest.TestLoader().loadTestsFromTestCase(TestTags) diff --git a/tests/test_urls.py b/tests/test_urls.py --- a/tests/test_urls.py +++ b/tests/test_urls.py @@ -1,6 +1,7 @@ import test_util import unittest from hgsubversion.svnwrap.svn_swig_wrapper import parse_url +from hgsubversion import svnrepo class TestSubversionUrls(test_util.TestBase): def test_standard_url(self): @@ -8,17 +9,55 @@ class TestSubversionUrls(test_util.TestB parse_url('file:///var/svn/repo')) def test_user_url(self): - self.assertEqual(('joe', None, 'https://svn.testurl.com/repo'), - parse_url('https://joe@svn.testurl.com/repo')) + self.assertEqual( + ('joe', None, 'https://svn.testurl.com/repo'), + parse_url('https://joe@svn.testurl.com/repo')) + self.assertEqual( + ('bob', None, 'https://svn.testurl.com/repo'), + parse_url('https://joe@svn.testurl.com/repo', 'bob')) def test_password_url(self): - 
self.assertEqual((None, 't3stpw', 'svn+ssh://svn.testurl.com/repo'), - parse_url('svn+ssh://:t3stpw@svn.testurl.com/repo')) + self.assertEqual( + (None, 't3stpw', 'svn+ssh://svn.testurl.com/repo'), + parse_url('svn+ssh://:t3stpw@svn.testurl.com/repo')) + self.assertEqual( + (None, '123abc', 'svn+ssh://svn.testurl.com/repo'), + parse_url('svn+ssh://:t3stpw@svn.testurl.com/repo', None, '123abc')) + + def test_svnssh_preserve_user(self): + self.assertEqual( + ('user', 't3stpw', 'svn+ssh://user@svn.testurl.com/repo', ), + parse_url('svn+ssh://user:t3stpw@svn.testurl.com/repo')) + self.assertEqual( + ('bob', '123abc', 'svn+ssh://bob@svn.testurl.com/repo', ), + parse_url('svn+ssh://user:t3stpw@svn.testurl.com/repo', 'bob', '123abc')) + self.assertEqual( + ('user2', None, 'svn+ssh://user2@svn.testurl.com/repo', ), + parse_url('svn+ssh://user2@svn.testurl.com/repo')) + self.assertEqual( + ('bob', None, 'svn+ssh://bob@svn.testurl.com/repo', ), + parse_url('svn+ssh://user2@svn.testurl.com/repo', 'bob')) def test_user_password_url(self): - self.assertEqual(('joe', 't3stpw', 'https://svn.testurl.com/repo'), - parse_url('https://joe:t3stpw@svn.testurl.com/repo')) + self.assertEqual( + ('joe', 't3stpw', 'https://svn.testurl.com/repo'), + parse_url('https://joe:t3stpw@svn.testurl.com/repo')) + self.assertEqual( + ('bob', '123abc', 'https://svn.testurl.com/repo'), + parse_url('https://joe:t3stpw@svn.testurl.com/repo', 'bob', '123abc')) + +class TestSvnRepo(test_util.TestBase): + def test_url_rewriting(self): + ui = test_util.ui.ui() + ui.setconfig('hgsubversion', 'username', 'bob') + repo = svnrepo.svnremoterepo(ui, 'svn+ssh://joe@foo/bar') + self.assertEqual('svn+ssh://bob@foo/bar', repo.svnurl) + + repo = svnrepo.svnremoterepo(ui, 'svn+http://joe@foo/bar') + self.assertEqual(('http://foo/bar', 'bob', None), repo.svnauth) + repo = svnrepo.svnremoterepo(ui, 'svn+https://joe@foo/bar') + self.assertEqual(('https://foo/bar', 'bob', None), repo.svnauth) def suite(): all = [unittest.TestLoader().loadTestsFromTestCase(TestSubversionUrls)] diff --git a/tests/test_utility_commands.py b/tests/test_utility_commands.py --- a/tests/test_utility_commands.py +++ b/tests/test_utility_commands.py @@ -76,7 +76,7 @@ class UtilityTests(test_util.TestBase): {'branch': 'localbranch', }) new = self.repo.commitctx(ctx) hg.update(self.repo, new) - wrappers.parent(lambda x, y: None, u, self.repo, svn=True) + wrappers.parents(lambda x, y: None, u, self.repo, svn=True) actual = u.popbuffer() self.assertEqual(actual, 'changeset: 3:4e256962fc5d\n' @@ -88,19 +88,19 @@ class UtilityTests(test_util.TestBase): hg.update(self.repo, 'default') # Make sure styles work u.pushbuffer() - wrappers.parent(lambda x, y: None, u, self.repo, svn=True, style='compact') + wrappers.parents(lambda x, y: None, u, self.repo, svn=True, style='compact') actual = u.popbuffer() self.assertEqual(actual, '4:1 1083037b18d8 2008-10-08 01:39 +0000 durin\n' ' Add gamma on trunk.\n\n') # custom templates too u.pushbuffer() - wrappers.parent(lambda x, y: None, u, self.repo, svn=True, template='{node}\n') + wrappers.parents(lambda x, y: None, u, self.repo, svn=True, template='{node}\n') actual = u.popbuffer() self.assertEqual(actual, '1083037b18d85cd84fa211c5adbaeff0fea2cd9f\n') u.pushbuffer() - wrappers.parent(lambda x, y: None, u, self.repo, svn=True) + wrappers.parents(lambda x, y: None, u, self.repo, svn=True) actual = u.popbuffer() self.assertEqual(actual, 'changeset: 4:1083037b18d8\n' diff --git a/tools/bisect-find-bad.sh b/tools/bisect-find-bad.sh --- 
a/tools/bisect-find-bad.sh +++ b/tools/bisect-find-bad.sh @@ -1,4 +1,4 @@ #!/bin/bash . $(dirname $0)/common.sh -verify_current_revision $1 +hg svn verify exit $? diff --git a/tools/common.sh b/tools/common.sh --- a/tools/common.sh +++ b/tools/common.sh @@ -1,7 +1,7 @@ function verify_current_revision() { /bin/rm -rf * - exportcmd="svn export `hg svn info 2> /dev/null | grep '^URL: ' | sed 's/URL: //'` -r`hg svn info | grep ^Revision | sed 's/.*: //;s/ .*//'` . --force" + exportcmd="svn export `hg svn info 2> /dev/null | grep '^URL: ' | sed 's/URL: //'`@`hg svn info | grep ^Revision | sed 's/.*: //;s/ .*//'` . --force" `echo $exportcmd` > /dev/null x=$? if [[ "$x" != "0" ]] ; then diff --git a/tools/converttags.sh b/tools/converttags.sh deleted file mode 100644 --- a/tools/converttags.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -# This shell script exists to convert hgsubversion tags to real hg tags. -# This will go away once hgsubversion's tags handling uses .hgtags directly. -hg tags | sed -E 's/([a-zA-Z0-9./_-]*) [^:]*:([a-f0-9]*)/\2 \1/' | grep -v ' tip$' > .hgtags -cat .hgtags | sed "$( -for x in `cat .hgtags| cut -f 1 -d ' '` ;do - echo -n "s/$x/" ; hg log --template '{node}' -r $x ; echo -n '/g; ' -done)" > .hgtags.new -mv .hgtags.new .hgtags diff --git a/tools/verify-all-heads.sh b/tools/verify-all-heads.sh --- a/tools/verify-all-heads.sh +++ b/tools/verify-all-heads.sh @@ -1,9 +1,8 @@ #!/bin/sh . $(dirname $0)/common.sh -for b in `hg branches -a | cut -f 1 -d ' ' | grep -v closed-branches` ; do +for b in `hg branches -aq` ; do hg co $b || break - echo Verifying $b - verify_current_revision keep > /dev/null || break - echo $b Verified. + echo verifying $b + hg svn verify done
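
The test updates above (openbranches in tests/test_fetch_branches.py and the heads() filtering in tests/test_tags.py) all replace the removed repo.heads(closed=False) call with a manual check of the 'close' changeset extra. A minimal sketch of that filtering outside the test harness, assuming the same Mercurial 1.3-era hg/ui APIs the tests already use; the repository path argument is a placeholder, not something defined by this patch::

    # Python 2, matching the test suite in this patch.
    from mercurial import hg
    from mercurial import ui

    def open_branches(path):
        """Return branch names whose heads are not closed.

        Mirrors the openbranches() helper in tests/test_fetch_branches.py:
        a head counts as closed when its changeset extra carries
        'close' == '1', since heads(closed=False) is no longer available.
        """
        repo = hg.repository(ui.ui(), path)
        hctxs = [repo[h] for h in repo.heads()]
        return sorted(set(ctx.branch() for ctx in hctxs
                          if ctx.extra().get('close', None) != '1'))

    if __name__ == '__main__':
        print open_branches('.')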