# HG changeset patch # User Dan Villiom Podlaski Christiansen # Date 1241166539 -7200 # Node ID ce676eff002b728017f4ad2a848df9c605cbf34b # Parent f423a87808322df9e0d52fd9d60e7fd32626bf11# Parent ba8e91a7c077b88645a7a5680060decee17e457b First merge, totally untested. diff --git a/README b/README --- a/README +++ b/README @@ -1,4 +1,4 @@ --*-restructuredtext-*- +.. -*-restructuredtext-*- ============ hgsubversion @@ -14,7 +14,8 @@ internals of Mercurial and/or Subversion Installation ------------ You need to have Subversion installed with the SWIG Python bindings -from Subversion 1.5 or later. You need Mercurial 1.1.1 or later. +from Subversion 1.5 or later. You need Mercurial 1.3 (currently in development) +or later. .. _mercurial: http://selenic.com/repo/hg .. _mercurial-stable: http://selenic.com/repo/hg-stable @@ -42,18 +43,20 @@ Basic Use ----------- Get a new clone of an svn server:: - $ hg svnclone [destination] + $ hg clone [destination] Real example:: - $ hg svnclone http://python-nose.googlecode.com/svn nose-hg + $ hg clone svn+http://python-nose.googlecode.com/svn nose-hg Note, you should pull from the root subversion directory, not specific -folders (such as trunk). +folders (such as trunk). Also, you only need to modify http:// urls as shown. +This is a side effect of Mercurial and Subversion both claiming the http +protocol, so svn+http is used to work around that. Pull new revisions into an already-converted repo:: - $ hg svn pull + $ hg pull For more information, see ``hg help svn`` while in a converted repo. diff --git a/__init__.py b/__init__.py --- a/__init__.py +++ b/__init__.py @@ -13,17 +13,20 @@ details. ''' import os +import sys +import traceback from mercurial import commands -from mercurial import hg -from mercurial import util as mutil +from mercurial import extensions +from mercurial import util as hgutil from svn import core -import svncommand -import fetch_command +import svncommands import tag_repo import util +import wrappers +import svnexternals def reposetup(ui, repo): if not util.is_svn_repo(repo): @@ -31,45 +34,89 @@ def reposetup(ui, repo): repo.__class__ = tag_repo.generate_repo_class(ui, repo) +def uisetup(ui): + """Do our UI setup. 
+ + Does the following wrappings: + * parent -> utility_commands.parent + * outgoing -> utility_commands.outgoing + """ + entry = extensions.wrapcommand(commands.table, 'parents', + wrappers.parent) + entry[1].append(('', 'svn', None, "show parent svn revision instead")) + entry = extensions.wrapcommand(commands.table, 'outgoing', + wrappers.outgoing) + entry[1].append(('', 'svn', None, "show revisions outgoing to subversion")) + entry = extensions.wrapcommand(commands.table, 'diff', + wrappers.diff) + entry[1].append(('', 'svn', None, + "show svn-style diffs, default against svn parent")) + entry = extensions.wrapcommand(commands.table, 'push', + wrappers.push) + entry[1].append(('', 'svn', None, "push to subversion")) + entry[1].append(('', 'svn-stupid', None, "use stupid replay during push to svn")) + entry = extensions.wrapcommand(commands.table, 'pull', + wrappers.pull) + entry[1].append(('', 'svn', None, "pull from subversion")) + entry[1].append(('', 'svn-stupid', None, "use stupid replay during pull from svn")) + + entry = extensions.wrapcommand(commands.table, 'clone', + wrappers.clone) + entry[1].extend([#('', 'skipto-rev', '0', 'skip commits before this revision.'), + ('', 'svn-stupid', False, 'be stupid and use diffy replay.'), + ('', 'svn-tag-locations', 'tags', 'Relative path to Subversion tags.'), + ('', 'svn-authors', '', 'username mapping filename'), + ('', 'svn-filemap', '', + 'remap file to exclude paths or include only certain paths'), + ]) -def svn(ui, repo, subcommand, *args, **opts): - '''see detailed help for list of subcommands''' try: - return svncommand.svncmd(ui, repo, subcommand, *args, **opts) - except core.SubversionException, e: - if e.apr_err == core.SVN_ERR_RA_SERF_SSL_CERT_UNTRUSTED: - raise mutil.Abort('It appears svn does not trust the ssl cert for this site.\n' - 'Please try running svn ls on that url first.') - raise + rebase = extensions.find('rebase') + if rebase: + entry = extensions.wrapcommand(rebase.cmdtable, 'rebase', wrappers.rebase) + entry[1].append(('', 'svn', None, 'automatic svn rebase', )) + except: + pass -def svn_fetch(ui, svn_url, hg_repo_path=None, **opts): - '''clone Subversion repository to a local Mercurial repository. +def svn(ui, repo, subcommand, *args, **opts): + '''see detailed help for list of subcommands''' - If no destination directory name is specified, it defaults to the - basename of the source plus "-hg". + # guess command if prefix + if subcommand not in svncommands.table: + candidates = [] + for c in svncommands.table: + if c.startswith(subcommand): + candidates.append(c) + if len(candidates) == 1: + subcommand = candidates[0] - You can specify multiple paths for the location of tags using comma - separated values. 
- ''' - if not hg_repo_path: - hg_repo_path = hg.defaultdest(svn_url) + "-hg" - ui.status("Assuming destination %s\n" % hg_repo_path) - should_update = not os.path.exists(hg_repo_path) - svn_url = util.normalize_url(svn_url) + path = os.path.dirname(repo.path) try: - res = fetch_command.fetch_revisions(ui, svn_url, hg_repo_path, **opts) + commandfunc = svncommands.table[subcommand] + if subcommand not in svncommands.nourl: + opts['svn_url'] = open(os.path.join(repo.path, 'svn', 'url')).read() + return commandfunc(ui, args=args, hg_repo_path=path, repo=repo, **opts) except core.SubversionException, e: if e.apr_err == core.SVN_ERR_RA_SERF_SSL_CERT_UNTRUSTED: - raise mutil.Abort('It appears svn does not trust the ssl cert for this site.\n' + raise hgutil.Abort('It appears svn does not trust the ssl cert for this site.\n' 'Please try running svn ls on that url first.') raise - if (res is None or res == 0) and should_update: - repo = hg.repository(ui, hg_repo_path) - commands.update(ui, repo, repo['tip'].node()) - return res + except TypeError: + tb = traceback.extract_tb(sys.exc_info()[2]) + if len(tb) == 1: + ui.status('Bad arguments for subcommand %s\n' % subcommand) + else: + raise + except KeyError, e: + tb = traceback.extract_tb(sys.exc_info()[2]) + if len(tb) == 1: + ui.status('Unknown subcommand %s\n' % subcommand) + else: + raise + + -commands.norepo += " svnclone" cmdtable = { "svn": (svn, @@ -79,18 +126,9 @@ cmdtable = { ('', 'filemap', '', 'remap file to exclude paths or include only certain paths'), ('', 'force', False, 'force an operation to happen'), + ('', 'username', '', 'username for authentication'), + ('', 'password', '', 'password for authentication'), ], - svncommand.generate_help(), + svncommands._helpgen(), ), - "svnclone": - (svn_fetch, - [('S', 'skipto-rev', 0, 'skip commits before this revision.'), - ('H', 'head', 0, 'skip revisions after this one.'), - ('', 'stupid', False, 'be stupid and use diffy replay.'), - ('T', 'tag-locations', 'tags', 'Relative path to Subversion tags.'), - ('A', 'authors', '', 'username mapping filename'), - ('', 'filemap', '', - 'remap file to exclude paths or include only certain paths'), - ], - 'hg svnclone source [dest]'), } diff --git a/cmdutil.py b/cmdutil.py new file mode 100644 --- /dev/null +++ b/cmdutil.py @@ -0,0 +1,300 @@ +#!/usr/bin/python +import re +import os +import urllib + +from mercurial import util as hgutil + +from svn import core + +import util +import svnwrap +import svnexternals + + +b_re = re.compile(r'^\+\+\+ b\/([^\n]*)', re.MULTILINE) +a_re = re.compile(r'^--- a\/([^\n]*)', re.MULTILINE) +devnull_re = re.compile(r'^([-+]{3}) /dev/null', re.MULTILINE) +header_re = re.compile(r'^diff --git .* b\/(.*)', re.MULTILINE) +newfile_devnull_re = re.compile(r'^--- /dev/null\n\+\+\+ b/([^\n]*)', + re.MULTILINE) + + +class NoFilesException(Exception): + """Exception raised when you try and commit without files. 
+ """ + +def formatrev(rev): + if rev == -1: + return '\t(working copy)' + return '\t(revision %d)' % rev + + +def filterdiff(diff, oldrev, newrev): + diff = newfile_devnull_re.sub(r'--- \1\t(revision 0)' '\n' + r'+++ \1\t(working copy)', + diff) + oldrev = formatrev(oldrev) + newrev = formatrev(newrev) + diff = a_re.sub(r'--- \1'+ oldrev, diff) + diff = b_re.sub(r'+++ \1' + newrev, diff) + diff = devnull_re.sub(r'\1 /dev/null\t(working copy)', diff) + diff = header_re.sub(r'Index: \1' + '\n' + ('=' * 67), diff) + return diff + + +def parentrev(ui, repo, hge, svn_commit_hashes): + """Find the svn parent revision of the repo's dirstate. + """ + workingctx = repo.parents()[0] + outrev = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, + workingctx.node()) + if outrev: + workingctx = repo[outrev[-1]].parents()[0] + return workingctx + + +def replay_convert_rev(hg_editor, svn, r): + hg_editor.set_current_rev(r) + svn.get_replay(r.revnum, hg_editor) + i = 1 + if hg_editor.missing_plaintexts: + hg_editor.ui.debug('Fetching %s files that could not use replay.\n' % + len(hg_editor.missing_plaintexts)) + files_to_grab = set() + rootpath = svn.subdir and svn.subdir[1:] or '' + for p in hg_editor.missing_plaintexts: + hg_editor.ui.note('.') + hg_editor.ui.flush() + if p[-1] == '/': + dirpath = p[len(rootpath):] + files_to_grab.update([dirpath + f for f,k in + svn.list_files(dirpath, r.revnum) + if k == 'f']) + else: + files_to_grab.add(p[len(rootpath):]) + hg_editor.ui.note('\nFetching files...\n') + for p in files_to_grab: + hg_editor.ui.note('.') + hg_editor.ui.flush() + if i % 50 == 0: + svn.init_ra_and_client() + i += 1 + data, mode = svn.get_file(p, r.revnum) + hg_editor.set_file(p, data, 'x' in mode, 'l' in mode) + hg_editor.missing_plaintexts = set() + hg_editor.ui.note('\n') + hg_editor.commit_current_delta() + + +def _isdir(svn, branchpath, svndir): + try: + svn.list_dir('%s/%s' % (branchpath, svndir)) + return True + except core.SubversionException: + return False + + +def _getdirchanges(svn, branchpath, parentctx, ctx, changedfiles, extchanges): + """Compute directories to add or delete when moving from parentctx + to ctx, assuming only 'changedfiles' files changed, and 'extchanges' + external references changed (as returned by svnexternals.diff()). + + Return (added, deleted) where 'added' is the list of all added + directories and 'deleted' the list of deleted directories. + Intermediate directories are included: if a/b/c is new and requires + the addition of a/b and a, those will be listed too. Intermediate + deleted directories are also listed, but item order of undefined + in either list. + """ + def finddirs(path, includeself=False): + if includeself: + yield path + pos = path.rfind('/') + while pos != -1: + yield path[:pos] + pos = path.rfind('/', 0, pos) + + def getctxdirs(ctx, keptdirs, extdirs): + dirs = {} + for f in ctx.manifest(): + for d in finddirs(f): + if d in dirs: + break + if d in keptdirs: + dirs[d] = 1 + for extdir in extdirs: + for d in finddirs(extdir, True): + dirs[d] = 1 + return dirs + + deleted, added = [], [] + changeddirs = {} + for f in changedfiles: + if f in parentctx and f in ctx: + # Updated files cannot cause directories to be created + # or removed. 
+ continue + for d in finddirs(f): + changeddirs[d] = 1 + for e in extchanges: + if not e[1] or not e[2]: + for d in finddirs(e[0], True): + changeddirs[d] = 1 + if not changeddirs: + return added, deleted + olddirs = getctxdirs(parentctx, changeddirs, + [e[0] for e in extchanges if e[1]]) + newdirs = getctxdirs(ctx, changeddirs, + [e[0] for e in extchanges if e[2]]) + + for d in newdirs: + if d not in olddirs and not _isdir(svn, branchpath, d): + added.append(d) + + for d in olddirs: + if d not in newdirs and _isdir(svn, branchpath, d): + deleted.append(d) + + return added, deleted + + +def _externals(ctx): + ext = svnexternals.externalsfile() + if '.hgsvnexternals' in ctx: + ext.read(ctx['.hgsvnexternals'].data()) + return ext + + +def commit_from_rev(ui, repo, rev_ctx, hg_editor, svn_url, base_revision, + username, password): + """Build and send a commit from Mercurial to Subversion. + """ + file_data = {} + svn = svnwrap.SubversionRepo(svn_url, username, password) + parent = rev_ctx.parents()[0] + parent_branch = rev_ctx.parents()[0].branch() + branch_path = 'trunk' + + if parent_branch and parent_branch != 'default': + branch_path = 'branches/%s' % parent_branch + + extchanges = list(svnexternals.diff(_externals(parent), + _externals(rev_ctx))) + addeddirs, deleteddirs = _getdirchanges(svn, branch_path, parent, rev_ctx, + rev_ctx.files(), extchanges) + deleteddirs = set(deleteddirs) + + props = {} + copies = {} + for file in rev_ctx.files(): + if file == '.hgsvnexternals': + continue + new_data = base_data = '' + action = '' + if file in rev_ctx: + fctx = rev_ctx.filectx(file) + new_data = fctx.data() + + if 'x' in fctx.flags(): + props.setdefault(file, {})['svn:executable'] = '*' + if 'l' in fctx.flags(): + props.setdefault(file, {})['svn:special'] = '*' + + if file not in parent: + renamed = fctx.renamed() + if renamed: + # TODO current model (and perhaps svn model) does not support + # this kind of renames: a -> b, b -> c + copies[file] = renamed[0] + base_data = parent[renamed[0]].data() + + action = 'add' + dirname = '/'.join(file.split('/')[:-1] + ['']) + else: + base_data = parent.filectx(file).data() + if ('x' in parent.filectx(file).flags() + and 'x' not in rev_ctx.filectx(file).flags()): + props.setdefault(file, {})['svn:executable'] = None + if ('l' in parent.filectx(file).flags() + and 'l' not in rev_ctx.filectx(file).flags()): + props.setdefault(file, {})['svn:special'] = None + action = 'modify' + else: + pos = file.rfind('/') + if pos >= 0: + if file[:pos] in deleteddirs: + # This file will be removed when its directory is removed + continue + action = 'delete' + file_data[file] = base_data, new_data, action + + def svnpath(p): + return '%s/%s' % (branch_path, p) + + changeddirs = [] + for d, v1, v2 in extchanges: + props.setdefault(svnpath(d), {})['svn:externals'] = v2 + if d not in deleteddirs and d not in addeddirs: + changeddirs.append(svnpath(d)) + + # Now we are done with files, we can prune deleted directories + # against themselves: ignore a/b if a/ is already removed + deleteddirs2 = list(deleteddirs) + deleteddirs2.sort(reverse=True) + for d in deleteddirs2: + pos = d.rfind('/') + if pos >= 0 and d[:pos] in deleteddirs: + deleteddirs.remove(d[:pos]) + + newcopies = {} + for source, dest in copies.iteritems(): + newcopies[svnpath(source)] = (svnpath(dest), base_revision) + + new_target_files = [svnpath(f) for f in file_data] + for tf, ntf in zip(file_data, new_target_files): + if tf in file_data: + file_data[ntf] = file_data[tf] + if tf in props: + props[ntf] = 
props[tf] + del props[tf] + if hgutil.binary(file_data[ntf][1]): + props.setdefault(ntf, {}).update(props.get(ntf, {})) + props.setdefault(ntf, {})['svn:mime-type'] = 'application/octet-stream' + del file_data[tf] + + addeddirs = [svnpath(d) for d in addeddirs] + deleteddirs = [svnpath(d) for d in deleteddirs] + new_target_files += addeddirs + deleteddirs + changeddirs + if not new_target_files: + raise NoFilesException() + try: + svn.commit(new_target_files, rev_ctx.description(), file_data, + base_revision, set(addeddirs), set(deleteddirs), + props, newcopies) + except core.SubversionException, e: + if hasattr(e, 'apr_err') and (e.apr_err == core.SVN_ERR_FS_TXN_OUT_OF_DATE + or e.apr_err == core.SVN_ERR_FS_CONFLICT): + raise hgutil.Abort('Base text was out of date, maybe rebase?') + else: + raise + + return True + +def islocalrepo(url): + if not url.startswith('file:///'): + return False + if '#' in url.split('/')[-1]: # strip off #anchor + url = url[:url.rfind('#')] + path = url[len('file://'):] + path = urllib.url2pathname(path).replace(os.sep, '/') + while '/' in path: + if reduce(lambda x,y: x and y, + map(lambda p: os.path.exists(os.path.join(path, p)), + ('hooks', 'format', 'db', ))): + return True + path = path.rsplit('/', 1)[0] + return False + +def issvnurl(url): + return url.startswith('svn') or islocalrepo(url) diff --git a/diff_cmd.py b/diff_cmd.py deleted file mode 100644 --- a/diff_cmd.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python -import re - -from mercurial import patch - -import util -import hg_delta_editor - -b_re = re.compile(r'^\+\+\+ b\/([^\n]*)', re.MULTILINE) -a_re = re.compile(r'^--- a\/([^\n]*)', re.MULTILINE) -devnull_re = re.compile(r'^([-+]{3}) /dev/null', re.MULTILINE) -header_re = re.compile(r'^diff --git .* b\/(.*)', re.MULTILINE) -newfile_devnull_re = re.compile(r'^--- /dev/null\n\+\+\+ b/([^\n]*)', - re.MULTILINE) -def filterdiff(diff, base_revision): - diff = newfile_devnull_re.sub(r'--- \1\t(revision 0)' '\n' - r'+++ \1\t(working copy)', - diff) - diff = a_re.sub(r'--- \1'+ ('\t(revision %d)' % base_revision), diff) - diff = b_re.sub(r'+++ \1' + '\t(working copy)', diff) - diff = devnull_re.sub(r'\1 /dev/null' '\t(working copy)', diff) - - diff = header_re.sub(r'Index: \1' + '\n' + ('=' * 67), diff) - return diff - - -def diff_command(ui, repo, hg_repo_path, **opts): - """show a diff of the most recent revision against its parent from svn - """ - hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, - ui_=ui) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - parent = repo.parents()[0] - o_r = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, parent.node()) - if o_r: - parent = repo[o_r[-1]].parents()[0] - base_rev, _junk = svn_commit_hashes[parent.node()] - it = patch.diff(repo, parent.node(), None, - opts=patch.diffopts(ui, opts={'git': True, - 'show_function': False, - 'ignore_all_space': False, - 'ignore_space_change': False, - 'ignore_blank_lines': False, - 'unified': True, - 'text': False, - })) - ui.write(filterdiff(''.join(it), base_rev)) -diff_command = util.register_subcommand('diff')(diff_command) diff --git a/hg_delta_editor.py b/hg_delta_editor.py --- a/hg_delta_editor.py +++ b/hg_delta_editor.py @@ -8,14 +8,14 @@ import traceback from mercurial import context from mercurial import hg from mercurial import ui -from mercurial import util +from mercurial import util as hgutil from mercurial import revlog from mercurial import node from svn import delta from svn import core import svnexternals 
-import util as our_util +import util def pickle_atomic(data, file_path, dir=None): """pickle some data to a path atomically. @@ -31,7 +31,7 @@ def pickle_atomic(data, file_path, dir=N except: #pragma: no cover raise else: - util.rename(path, file_path) + hgutil.rename(path, file_path) def stash_exception_on_self(fn): """Stash any exception raised in the method on self. @@ -60,12 +60,12 @@ class HgChangeReceiver(delta.Editor): self.revmap[revnum, branch] = node_hash def last_known_revision(self): - ''' Obtain the highest numbered -- i.e. latest -- revision known. + """Obtain the highest numbered -- i.e. latest -- revision known. Currently, this function just iterates over the entire revision map using the max() builtin. This may be slow for extremely large repositories, but for now, it's fast enough. - ''' + """ try: return max(k[0] for k in self.revmap.iterkeys()) except ValueError: @@ -81,6 +81,8 @@ class HgChangeReceiver(delta.Editor): subdir is the subdirectory of the edits *on the svn server*. It is needed for stripping paths off in certain cases. """ + if repo and repo.ui and not ui_: + ui_ = repo.ui if not ui_: ui_ = ui.ui() self.ui = ui_ @@ -98,7 +100,7 @@ class HgChangeReceiver(delta.Editor): self.subdir = self.subdir[1:] self.revmap = {} if os.path.exists(self.revmap_file): - self.revmap = our_util.parse_revmap(self.revmap_file) + self.revmap = util.parse_revmap(self.revmap_file) self.branches = {} if os.path.exists(self.branch_info_file): f = open(self.branch_info_file) @@ -130,11 +132,22 @@ class HgChangeReceiver(delta.Editor): self.readauthors(authors) if self.authors: self.writeauthors() + + self.lastdate = '1970-01-01 00:00:00 -0000' self.includepaths = {} self.excludepaths = {} if filemap and os.path.exists(filemap): self.readfilemap(filemap) + def fixdate(self, date): + if date is not None: + date = date.replace('T', ' ').replace('Z', '').split('.')[0] + date += ' -0000' + self.lastdate = date + else: + date = self.lastdate + return date + def __setup_repo(self, repo_path): """Verify the repo is going to work out for us. @@ -150,7 +163,7 @@ class HgChangeReceiver(delta.Editor): self.repo = hg.repository(self.ui, repo_path, create=True) os.makedirs(os.path.dirname(self.uuid_file)) f = open(self.revmap_file, 'w') - f.write('%s\n' % our_util.REVMAP_FILE_VERSION) + f.write('%s\n' % util.REVMAP_FILE_VERSION) f.flush() f.close() @@ -206,8 +219,6 @@ class HgChangeReceiver(delta.Editor): while paths_need_discovery: p = paths_need_discovery.pop(0) path_could_be_file = True - # TODO(augie) Figure out if you can use break here in a for loop, quick - # testing of that failed earlier. ind = 0 while ind < len(paths_need_discovery) and not paths_need_discovery: if op.startswith(p): @@ -233,10 +244,14 @@ class HgChangeReceiver(delta.Editor): parentdir = '/'.join(path[:-1]) filepaths = [p for p in filepaths if not '/'.join(p).startswith(parentdir)] branchpath = self._normalize_path(parentdir) + if branchpath.startswith('tags/'): + continue branchname = self._localname(branchpath) if branchpath.startswith('trunk/'): branches[self._localname('trunk')] = 'trunk' continue + if branchname and branchname.startswith('../'): + continue branches[branchname] = branchpath return branches @@ -250,6 +265,7 @@ class HgChangeReceiver(delta.Editor): def _localname(self, path): """Compute the local name for a branch located at path. """ + assert not path.startswith('tags/') if path == 'trunk': return None elif path.startswith('branches/'): @@ -274,6 +290,8 @@ class HgChangeReceiver(delta.Editor): known. 
""" path = self._normalize_path(path) + if path.startswith('tags/'): + return None, None, None test = '' path_comps = path.split('/') while self._localname(test) not in self.branches and len(path_comps): @@ -288,10 +306,17 @@ class HgChangeReceiver(delta.Editor): if path.startswith('trunk/'): path = test.split('/')[1:] test = 'trunk' + elif path.startswith('branches/'): + elts = path.split('/') + test = '/'.join(elts[:2]) + path = '/'.join(elts[2:]) else: path = test.split('/')[-1] test = '/'.join(test.split('/')[:-1]) - return path, self._localname(test), test + ln = self._localname(test) + if ln and ln.startswith('../'): + return None, None, None + return path, ln, test def set_current_rev(self, rev): """Set the revision we're currently converting. @@ -358,6 +383,8 @@ class HgChangeReceiver(delta.Editor): return True def _is_path_valid(self, path): + if path is None: + return False subpath = self._split_branch_path(path)[0] if subpath is None: return False @@ -504,7 +531,7 @@ class HgChangeReceiver(delta.Editor): # check for case 5 for known in self.branches: if self._svnpath(known).startswith(p): - self.branches_to_delete.add(br) # case 5 + self.branches_to_delete.add(known) # case 5 added_branches.update(self.__determine_parent_branch(p, paths[p].copyfrom_path, paths[p].copyfrom_rev, revision.revnum)) for t in tags_to_delete: @@ -565,8 +592,7 @@ class HgChangeReceiver(delta.Editor): files_to_commit.sort() branch_batches = {} rev = self.current_rev - date = rev.date.replace('T', ' ').replace('Z', '').split('.')[0] - date += ' -0000' + date = self.fixdate(rev.date) # build up the branches that have files on them for f in files_to_commit: @@ -615,10 +641,9 @@ class HgChangeReceiver(delta.Editor): revlog.nullid) if parents[0] in closed_revs and branch in self.branches_to_delete: continue - # TODO this needs to be fixed with the new revmap - extra = our_util.build_extra(rev.revnum, branch, - open(self.uuid_file).read(), - self.subdir) + extra = util.build_extra(rev.revnum, branch, + open(self.uuid_file).read(), + self.subdir) if branch is not None: if (branch not in self.branches and branch not in self.repo.branchtags()): @@ -658,7 +683,7 @@ class HgChangeReceiver(delta.Editor): date, extra) new_hash = self.repo.commitctx(current_ctx) - our_util.describe_commit(self.ui, new_hash, branch) + util.describe_commit(self.ui, new_hash, branch) if (rev.revnum, branch) not in self.revmap: self.add_to_revmap(rev.revnum, branch, new_hash) # now we handle branches that need to be committed without any files @@ -671,9 +696,9 @@ class HgChangeReceiver(delta.Editor): raise IOError # True here meant nuke all files, shouldn't happen with branch closing if self.commit_branches_empty[branch]: #pragma: no cover - raise util.Abort('Empty commit to an open branch attempted. ' - 'Please report this issue.') - extra = our_util.build_extra(rev.revnum, branch, + raise hgutil.Abort('Empty commit to an open branch attempted. 
' + 'Please report this issue.') + extra = util.build_extra(rev.revnum, branch, open(self.uuid_file).read(), self.subdir) current_ctx = context.memctx(self.repo, @@ -685,23 +710,23 @@ class HgChangeReceiver(delta.Editor): date, extra) new_hash = self.repo.commitctx(current_ctx) - our_util.describe_commit(self.ui, new_hash, branch) + util.describe_commit(self.ui, new_hash, branch) if (rev.revnum, branch) not in self.revmap: self.add_to_revmap(rev.revnum, branch, new_hash) self._save_metadata() self.clear_current_info() def authorforsvnauthor(self, author): - if(author in self.authors): + if author in self.authors: return self.authors[author] - return '%s%s' %(author, self.author_host) + return '%s%s' % (author, self.author_host) def svnauthorforauthor(self, author): for svnauthor, hgauthor in self.authors.iteritems(): if author == hgauthor: return svnauthor else: - # Mercurial incorrectly splits at e.g. '.', so we roll our own. + # return the original svn-side author return author.rsplit('@', 1)[0] def readauthors(self, authorfile): @@ -837,8 +862,8 @@ class HgChangeReceiver(delta.Editor): # assuming it is a directory self.externals[path] = None map(self.delete_file, [pat for pat in self.current_files.iterkeys() - if pat.startswith(path)]) - for f in ctx.walk(our_util.PrefixMatch(br_path2)): + if pat.startswith(path+'/')]) + for f in ctx.walk(util.PrefixMatch(br_path2)): f_p = '%s/%s' % (path, f[len(br_path2):]) if f_p not in self.current_files: self.delete_file(f_p) @@ -846,7 +871,7 @@ class HgChangeReceiver(delta.Editor): delete_entry = stash_exception_on_self(delete_entry) def open_file(self, path, parent_baton, base_revision, p=None): - self.current_file = 'foobaz' + self.current_file = None fpath, branch = self._path_and_branch_for_path(path) if fpath: self.current_file = path @@ -891,7 +916,7 @@ class HgChangeReceiver(delta.Editor): def add_file(self, path, parent_baton=None, copyfrom_path=None, copyfrom_revision=None, file_pool=None): - self.current_file = 'foobaz' + self.current_file = None self.base_revision = None if path in self.deleted_files: del self.deleted_files[path] @@ -953,6 +978,7 @@ class HgChangeReceiver(delta.Editor): source_rev = copyfrom_revision cp_f, source_branch = self._path_and_branch_for_path(copyfrom_path) if cp_f == '' and br_path == '': + assert br_path is not None self.branches[branch] = source_branch, source_rev, self.current_rev.revnum new_hash = self.get_parent_revision(source_rev + 1, source_branch) @@ -1036,8 +1062,8 @@ class HgChangeReceiver(delta.Editor): handler, baton = delta.svn_txdelta_apply(source, target, None) if not callable(handler): #pragma: no cover - raise util.Abort('Error in Subversion bindings: ' - 'cannot call handler!') + raise hgutil.Abort('Error in Subversion bindings: ' + 'cannot call handler!') def txdelt_window(window): try: if not self._is_path_valid(self.current_file): @@ -1050,7 +1076,7 @@ class HgChangeReceiver(delta.Editor): if e.apr_err == core.SVN_ERR_INCOMPLETE_DATA: self.missing_plaintexts.add(self.current_file) else: #pragma: no cover - raise util.Abort(*e.args) + raise hgutil.Abort(*e.args) except: #pragma: no cover print len(base), self.current_file self._exception_info = sys.exc_info() diff --git a/push_cmd.py b/push_cmd.py deleted file mode 100644 --- a/push_cmd.py +++ /dev/null @@ -1,280 +0,0 @@ -from mercurial import util as merc_util -from mercurial import hg -from mercurial import node -from svn import core - -import util -import hg_delta_editor -import svnexternals -import svnwrap -import fetch_command -import 
utility_commands - - -def push_revisions_to_subversion(ui, repo, hg_repo_path, svn_url, - stupid=False, **opts): - """push revisions starting at a specified head back to Subversion. - """ - oldencoding = merc_util._encoding - merc_util._encoding = 'UTF-8' - hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, - ui_=ui) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - # Strategy: - # 1. Find all outgoing commits from this head - if len(repo.parents()) != 1: - ui.status('Cowardly refusing to push branch merge') - return 1 - workingrev = repo.parents()[0] - outgoing = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, workingrev.node()) - if not (outgoing and len(outgoing)): - ui.status('No revisions to push.') - return 0 - while outgoing: - oldest = outgoing.pop(-1) - old_ctx = repo[oldest] - if len(old_ctx.parents()) != 1: - ui.status('Found a branch merge, this needs discussion and ' - 'implementation.') - return 1 - base_n = old_ctx.parents()[0].node() - old_children = repo[base_n].children() - svnbranch = repo[base_n].branch() - oldtip = base_n - samebranchchildren = [c for c in repo[oldtip].children() if c.branch() == svnbranch - and c.node() in svn_commit_hashes] - while samebranchchildren: - oldtip = samebranchchildren[0].node() - samebranchchildren = [c for c in repo[oldtip].children() if c.branch() == svnbranch - and c.node() in svn_commit_hashes] - # 2. Commit oldest revision that needs to be pushed - base_revision = svn_commit_hashes[base_n][0] - commit_from_rev(ui, repo, old_ctx, hge, svn_url, base_revision) - # 3. Fetch revisions from svn - r = fetch_command.fetch_revisions(ui, svn_url, hg_repo_path, - stupid=stupid) - assert not r or r == 0 - # 4. Find the new head of the target branch - repo = hg.repository(ui, hge.path) - oldtipctx = repo[oldtip] - replacement = [c for c in oldtipctx.children() if c not in old_children - and c.branch() == oldtipctx.branch()] - assert len(replacement) == 1, 'Replacement node came back as: %r' % replacement - replacement = replacement[0] - # 5. 
Rebase all children of the currently-pushing rev to the new branch - heads = repo.heads(old_ctx.node()) - for needs_transplant in heads: - def extrafn(ctx, extra): - if ctx.node() == oldest: - return - extra['branch'] = ctx.branch() - utility_commands.rebase_commits(ui, repo, hg_repo_path, - extrafn=extrafn, - sourcerev=needs_transplant, - **opts) - repo = hg.repository(ui, hge.path) - for child in repo[replacement.node()].children(): - rebasesrc = node.bin(child.extra().get('rebase_source', node.hex(node.nullid))) - if rebasesrc in outgoing: - while rebasesrc in outgoing: - rebsrcindex = outgoing.index(rebasesrc) - outgoing = (outgoing[0:rebsrcindex] + - [child.node(), ] + outgoing[rebsrcindex+1:]) - children = [c for c in child.children() if c.branch() == child.branch()] - if children: - child = children[0] - rebasesrc = node.bin(child.extra().get('rebase_source', node.hex(node.nullid))) - hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), hge.revmap.iterkeys())) - merc_util._encoding = oldencoding - return 0 -push_revisions_to_subversion = util.register_subcommand('push')(push_revisions_to_subversion) -# for git expats -push_revisions_to_subversion = util.register_subcommand('dcommit')(push_revisions_to_subversion) - -def _isdir(svn, branchpath, svndir): - try: - svn.list_dir('%s/%s' % (branchpath, svndir)) - return True - except core.SubversionException: - return False - -def _getdirchanges(svn, branchpath, parentctx, ctx, changedfiles, extchanges): - """Compute directories to add or delete when moving from parentctx - to ctx, assuming only 'changedfiles' files changed, and 'extchanges' - external references changed (as returned by svnexternals.diff()). - - Return (added, deleted) where 'added' is the list of all added - directories and 'deleted' the list of deleted directories. - Intermediate directories are included: if a/b/c is new and requires - the addition of a/b and a, those will be listed too. Intermediate - deleted directories are also listed, but item order of undefined - in either list. - """ - def finddirs(path, includeself=False): - if includeself: - yield path - pos = path.rfind('/') - while pos != -1: - yield path[:pos] - pos = path.rfind('/', 0, pos) - - def getctxdirs(ctx, keptdirs, extdirs): - dirs = {} - for f in ctx.manifest(): - for d in finddirs(f): - if d in dirs: - break - if d in keptdirs: - dirs[d] = 1 - for extdir in extdirs: - for d in finddirs(extdir, True): - dirs[d] = 1 - return dirs - - deleted, added = [], [] - changeddirs = {} - for f in changedfiles: - if f in parentctx and f in ctx: - # Updated files cannot cause directories to be created - # or removed. 
- continue - for d in finddirs(f): - changeddirs[d] = 1 - for e in extchanges: - if not e[1] or not e[2]: - for d in finddirs(e[0], True): - changeddirs[d] = 1 - if not changeddirs: - return added, deleted - olddirs = getctxdirs(parentctx, changeddirs, - [e[0] for e in extchanges if e[1]]) - newdirs = getctxdirs(ctx, changeddirs, - [e[0] for e in extchanges if e[2]]) - - for d in newdirs: - if d not in olddirs and not _isdir(svn, branchpath, d): - added.append(d) - - for d in olddirs: - if d not in newdirs and _isdir(svn, branchpath, d): - deleted.append(d) - - return added, deleted - -def _externals(ctx): - ext = svnexternals.externalsfile() - if '.hgsvnexternals' in ctx: - ext.read(ctx['.hgsvnexternals'].data()) - return ext - -def commit_from_rev(ui, repo, rev_ctx, hg_editor, svn_url, base_revision): - """Build and send a commit from Mercurial to Subversion. - """ - file_data = {} - svn = svnwrap.SubversionRepo(svn_url, username=merc_util.getuser()) - parent = rev_ctx.parents()[0] - parent_branch = rev_ctx.parents()[0].branch() - branch_path = 'trunk' - - if parent_branch and parent_branch != 'default': - branch_path = 'branches/%s' % parent_branch - - extchanges = list(svnexternals.diff(_externals(parent), - _externals(rev_ctx))) - addeddirs, deleteddirs = _getdirchanges(svn, branch_path, parent, rev_ctx, - rev_ctx.files(), extchanges) - deleteddirs = set(deleteddirs) - - props = {} - copies = {} - for file in rev_ctx.files(): - if file == '.hgsvnexternals': - continue - new_data = base_data = '' - action = '' - if file in rev_ctx: - fctx = rev_ctx.filectx(file) - new_data = fctx.data() - - if 'x' in fctx.flags(): - props.setdefault(file, {})['svn:executable'] = '*' - if 'l' in fctx.flags(): - props.setdefault(file, {})['svn:special'] = '*' - - if file not in parent: - renamed = fctx.renamed() - if renamed: - # TODO current model (and perhaps svn model) does not support - # this kind of renames: a -> b, b -> c - copies[file] = renamed[0] - base_data = parent[renamed[0]].data() - - action = 'add' - dirname = '/'.join(file.split('/')[:-1] + ['']) - else: - base_data = parent.filectx(file).data() - if ('x' in parent.filectx(file).flags() - and 'x' not in rev_ctx.filectx(file).flags()): - props.setdefault(file, {})['svn:executable'] = None - if ('l' in parent.filectx(file).flags() - and 'l' not in rev_ctx.filectx(file).flags()): - props.setdefault(file, {})['svn:special'] = None - action = 'modify' - else: - pos = file.rfind('/') - if pos >= 0: - if file[:pos] in deleteddirs: - # This file will be removed when its directory is removed - continue - action = 'delete' - file_data[file] = base_data, new_data, action - - def svnpath(p): - return '%s/%s' % (branch_path, p) - - changeddirs = [] - for d, v1, v2 in extchanges: - props.setdefault(svnpath(d), {})['svn:externals'] = v2 - if d not in deleteddirs and d not in addeddirs: - changeddirs.append(svnpath(d)) - - # Now we are done with files, we can prune deleted directories - # against themselves: ignore a/b if a/ is already removed - deleteddirs2 = list(deleteddirs) - deleteddirs2.sort(reverse=True) - for d in deleteddirs2: - pos = d.rfind('/') - if pos >= 0 and d[:pos] in deleteddirs: - deleteddirs.remove(d[:pos]) - - newcopies = {} - for source, dest in copies.iteritems(): - newcopies[svnpath(source)] = (svnpath(dest), base_revision) - - new_target_files = [svnpath(f) for f in file_data] - for tf, ntf in zip(file_data, new_target_files): - if tf in file_data: - file_data[ntf] = file_data[tf] - if tf in props: - props[ntf] = props[tf] - del 
props[tf] - if merc_util.binary(file_data[ntf][1]): - props.setdefault(ntf, {}).update(props.get(ntf, {})) - props.setdefault(ntf, {})['svn:mime-type'] = 'application/octet-stream' - del file_data[tf] - - addeddirs = [svnpath(d) for d in addeddirs] - deleteddirs = [svnpath(d) for d in deleteddirs] - new_target_files += addeddirs + deleteddirs + changeddirs - try: - svn.commit(new_target_files, rev_ctx.description(), file_data, - base_revision, set(addeddirs), set(deleteddirs), - props, newcopies) - except core.SubversionException, e: - if hasattr(e, 'apr_err') and (e.apr_err == core.SVN_ERR_FS_TXN_OUT_OF_DATE - or e.apr_err == core.SVN_ERR_FS_CONFLICT): - raise merc_util.Abort('Base text was out of date, maybe rebase?') - else: - raise diff --git a/rebuildmeta.py b/rebuildmeta.py deleted file mode 100644 --- a/rebuildmeta.py +++ /dev/null @@ -1,120 +0,0 @@ -import os -import pickle - -from mercurial import node -from mercurial import util as mutil - -import svnwrap -import util - -def rebuildmeta(ui, repo, hg_repo_path, args, **opts): - """rebuild hgsubversion metadata using values stored in revisions - """ - if len(args) != 1: - raise mutil.Abort('You must pass the svn URI used to create this repo.') - uuid = None - url = args[0].rstrip('/') - svn = svnwrap.SubversionRepo(url=url) - subdir = svn.subdir - svnmetadir = os.path.join(repo.path, 'svn') - if not os.path.exists(svnmetadir): - os.makedirs(svnmetadir) - - revmap = open(os.path.join(svnmetadir, 'rev_map'), 'w') - revmap.write('1\n') - last_rev = -1 - branchinfo = {} - noderevnums = {} - for rev in repo: - ctx = repo[rev] - convinfo = ctx.extra().get('convert_revision', None) - if convinfo: - assert convinfo.startswith('svn:') - revpath, revision = convinfo[40:].split('@') - if subdir and subdir[0] != '/': - subdir = '/' + subdir - if subdir and subdir[-1] == '/': - subdir = subdir[:-1] - assert revpath.startswith(subdir), ('That does not look like the ' - 'right location in the repo.') - if uuid is None: - uuid = convinfo[4:40] - assert uuid == svn.uuid, 'UUIDs did not match!' 
- urlfile = open(os.path.join(svnmetadir, 'url'), 'w') - urlfile.write(url) - urlfile.close() - uuidfile = open(os.path.join(svnmetadir, 'uuid'), 'w') - uuidfile.write(uuid) - uuidfile.close() - commitpath = revpath[len(subdir)+1:] - if commitpath.startswith('branches'): - commitpath = commitpath[len('branches/'):] - elif commitpath == 'trunk': - commitpath = '' - else: - assert False, 'Unhandled case in rebuildmeta' - revmap.write('%s %s %s\n' % (revision, - node.hex(ctx.node()), - commitpath)) - revision = int(revision) - noderevnums[ctx.node()] = revision - if revision > last_rev: - last_rev = revision - branch = ctx.branch() - if branch == 'default': - branch = None - if branch not in branchinfo: - parent = ctx.parents()[0] - if (parent.node() in noderevnums - and parent.branch() != ctx.branch()): - parentbranch = parent.branch() - if parentbranch == 'default': - parentbranch = None - else: - parentbranch = None - branchinfo[branch] = (parentbranch, - noderevnums.get(parent.node(), 0), - revision) - for c in ctx.children(): - if c.branch() == 'closed-branches': - if branch in branchinfo: - del branchinfo[branch] - branchinfofile = open(os.path.join(svnmetadir, 'branch_info'), 'w') - pickle.dump(branchinfo, branchinfofile) - branchinfofile.close() - tagsinfo = {} - realtags = svn.tags - tagsleft = realtags.items() - while tagsleft: - tag, tagparent = tagsleft.pop(0) - source, rev = tagparent - if source.startswith('tags/'): - src = source[len('tags/'):] - if src in tagsinfo: - tagsinfo[tag] = tagsinfo[src] - elif src in realtags: - if (realtags[src][1] <= last_rev - or realtags[src][0].startswith('tags/')): - tagsleft.append(src) - else: - older_tags = svn.tags_at_rev(rev) - newsrc, newrev = older_tags[src] - tagsleft.append((tag, (newsrc, newrev))) - if source.startswith('branches/') or source == 'trunk': - source = determinebranch(source) - if rev <= last_rev: - tagsinfo[tag] = source, rev - tagsinfofile = open(os.path.join(svnmetadir, 'tag_info'), 'w') - pickle.dump(tagsinfo, tagsinfofile) - tagsinfofile.close() -rebuildmeta = util.register_subcommand('rebuildmeta')(rebuildmeta) -rebuildmeta = util.command_needs_no_url(rebuildmeta) - -def determinebranch(branch): - if branch.startswith('branches'): - branch = branch[len('branches/'):] - elif branch == 'trunk': - branch = None - else: - assert False, 'Unhandled case while regenerating metadata.' - return branch diff --git a/fetch_command.py b/stupid.py rename from fetch_command.py rename to stupid.py --- a/fetch_command.py +++ b/stupid.py @@ -1,158 +1,17 @@ import cStringIO import re -import os from mercurial import patch from mercurial import node from mercurial import context from mercurial import revlog -from mercurial import util as merc_util from svn import core -from svn import delta -import hg_delta_editor import svnwrap import svnexternals import util -def print_your_svn_is_old_message(ui): #pragma: no cover - ui.status("In light of that, I'll fall back and do diffs, but it won't do " - "as good a job. 
You should really upgrade your server.\n") - - -def fetch_revisions(ui, svn_url, hg_repo_path, skipto_rev=0, head=0, - stupid=None, - tag_locations='tags', - authors=None, - filemap=None, - **opts): - """pull new revisions from Subversion - """ - - svn_url = util.normalize_url(svn_url) - old_encoding = merc_util._encoding - merc_util._encoding = 'UTF-8' - skipto_rev=int(skipto_rev) - - have_replay = not stupid - if have_replay and not callable( - delta.svn_txdelta_apply(None, None, None)[0]): #pragma: no cover - ui.status('You are using old Subversion SWIG bindings. Replay will not' - ' work until you upgrade to 1.5.0 or newer. Falling back to' - ' a slower method that may be buggier. Please upgrade, or' - ' contribute a patch to use the ctypes bindings instead' - ' of SWIG.\n') - have_replay = False - svn = svnwrap.SubversionRepo(svn_url, username=merc_util.getuser()) - author_host = "@%s" % svn.uuid - tag_locations = tag_locations.split(',') - hg_editor = hg_delta_editor.HgChangeReceiver(hg_repo_path, - ui_=ui, - subdir=svn.subdir, - author_host=author_host, - tag_locations=tag_locations, - authors=authors, - filemap=filemap) - - if os.path.exists(hg_editor.uuid_file): - initializing_repo = False - uuid = open(hg_editor.uuid_file).read() - assert uuid == svn.uuid - start = hg_editor.last_known_revision() - else: - open(hg_editor.uuid_file, 'w').write(svn.uuid) - open(hg_editor.svn_url_file, 'w').write(svn_url) - initializing_repo = True - start = skipto_rev - - if head <= 0: - stop = svn.last_changed_rev - else: - stop = head - - if initializing_repo and start > 0: - raise merc_util.Abort('Revision skipping at repository initialization ' - 'remains unimplemented.') - - if start >= stop: - ui.status('No new revisions beyond %d.\n' % stop) - return - else: - ui.status('Pulling revisions %d through %d.\n' % (start, stop)) - - # start converting revisions - for r in svn.revisions(start=start, stop=head): - valid = True - hg_editor.update_branch_tag_map_for_rev(r) - for p in r.paths: - if hg_editor._is_path_valid(p): - valid = True - break - if valid: - # got a 502? Try more than once! 
- tries = 0 - converted = False - while not converted and tries < 3: - try: - util.describe_revision(ui, r) - if have_replay: - try: - replay_convert_rev(hg_editor, svn, r, skipto_rev) - except svnwrap.SubversionRepoCanNotReplay, e: #pragma: no cover - ui.status('%s\n' % e.message) - print_your_svn_is_old_message(ui) - have_replay = False - stupid_svn_server_pull_rev(ui, svn, hg_editor, r) - else: - stupid_svn_server_pull_rev(ui, svn, hg_editor, r) - converted = True - except core.SubversionException, e: #pragma: no cover - if e.apr_err == core.SVN_ERR_RA_DAV_REQUEST_FAILED: - tries += 1 - ui.status('Got a 502, retrying (%s)\n' % tries) - else: - raise merc_util.Abort(*e.args) - merc_util._encoding = old_encoding -fetch_revisions = util.register_subcommand('pull')(fetch_revisions) - - -def cleanup_file_handles(svn, count): - if count % 50 == 0: - svn.init_ra_and_client() - -def replay_convert_rev(hg_editor, svn, r, skipto_rev): - hg_editor.set_current_rev(r) - svn.get_replay(r.revnum, hg_editor, skipto_rev) - i = 1 - if hg_editor.missing_plaintexts: - hg_editor.ui.debug('Fetching %s files that could not use replay.\n' % - len(hg_editor.missing_plaintexts)) - files_to_grab = set() - rootpath = svn.subdir and svn.subdir[1:] or '' - for p in hg_editor.missing_plaintexts: - hg_editor.ui.note('.') - hg_editor.ui.flush() - if p[-1] == '/': - dirpath = p[len(rootpath):] - files_to_grab.update([dirpath + f for f,k in - svn.list_files(dirpath, r.revnum) - if k == 'f']) - else: - files_to_grab.add(p[len(rootpath):]) - hg_editor.ui.note('\nFetching files...\n') - for p in files_to_grab: - hg_editor.ui.note('.') - hg_editor.ui.flush() - cleanup_file_handles(svn, i) - i += 1 - data, mode = svn.get_file(p, r.revnum) - hg_editor.set_file(p, data, 'x' in mode, 'l' in mode) - hg_editor.missing_plaintexts = set() - hg_editor.ui.note('\n') - hg_editor.commit_current_delta() - - binary_file_re = re.compile(r'''Index: ([^\n]*) =* Cannot display: file marked as a binary type.''') @@ -181,6 +40,16 @@ property_special_removed_re = re.compile (?:Deleted|Name): svn:special \-''') + +class BadPatchApply(Exception): + pass + + +def print_your_svn_is_old_message(ui): #pragma: no cover + ui.status("In light of that, I'll fall back and do diffs, but it won't do " + "as good a job. You should really upgrade your server.\n") + + def mempatchproxy(parentctx, files): # Avoid circular references patch.patchfile -> mempatch patchfile = patch.patchfile @@ -222,7 +91,8 @@ def filteriterhunks(hg_editor): yield data return filterhunks -def stupid_diff_branchrev(ui, svn, hg_editor, branch, r, parentctx): + +def diff_branchrev(ui, svn, hg_editor, branch, r, parentctx): """Extract all 'branch' content at a given revision. Return a tuple (files, filectxfn) where 'files' is the list of all files @@ -456,7 +326,7 @@ def getcopies(svn, hg_editor, branch, br hgcopies.update(copies) return hgcopies -def stupid_fetch_externals(svn, branchpath, r, parentctx): +def fetch_externals(svn, branchpath, r, parentctx): """Extract svn:externals for the current revision and branch Return an externalsfile instance or None if there are no externals @@ -507,7 +377,8 @@ def stupid_fetch_externals(svn, branchpa return None return externals -def stupid_fetch_branchrev(svn, hg_editor, branch, branchpath, r, parentctx): + +def fetch_branchrev(svn, hg_editor, branch, branchpath, r, parentctx): """Extract all 'branch' content at a given revision. 
Return a tuple (files, filectxfn) where 'files' is the list of all files @@ -559,7 +430,7 @@ def stupid_fetch_branchrev(svn, hg_edito return files, filectxfn -def stupid_svn_server_pull_rev(ui, svn, hg_editor, r): +def svn_server_pull_rev(ui, svn, hg_editor, r): # this server fails at replay branches = hg_editor.branches_in_paths(r.paths, r.revnum, svn.checkpath, svn.list_files) deleted_branches = {} @@ -596,8 +467,8 @@ def stupid_svn_server_pull_rev(ui, svn, break if not is_closed: deleted_branches[branch] = branchtip - date = r.date.replace('T', ' ').replace('Z', '').split('.')[0] - date += ' -0000' + + date = hg_editor.fixdate(r.date) check_deleted_branches = set() for b in branches: parentctx = hg_editor.repo[hg_editor.get_parent_revision(r.revnum, b)] @@ -611,15 +482,15 @@ def stupid_svn_server_pull_rev(ui, svn, continue else: try: - files_touched, filectxfn2 = stupid_diff_branchrev( + files_touched, filectxfn2 = diff_branchrev( ui, svn, hg_editor, b, r, parentctx) except BadPatchApply, e: # Either this revision or the previous one does not exist. ui.status("Fetching entire revision: %s.\n" % e.args[0]) - files_touched, filectxfn2 = stupid_fetch_branchrev( + files_touched, filectxfn2 = fetch_branchrev( svn, hg_editor, b, branches[b], r, parentctx) - externals = stupid_fetch_externals(svn, branches[b], r, parentctx) + externals = fetch_externals(svn, branches[b], r, parentctx) if externals is not None: files_touched.append('.hgsvnexternals') @@ -642,9 +513,10 @@ def stupid_svn_server_pull_rev(ui, svn, for f in excluded: files_touched.remove(f) if parentctx.node() != node.nullid or files_touched: - # TODO(augie) remove this debug code? Or maybe it's sane to have it. for f in files_touched: if f: + # this is a case that really shouldn't ever happen, it means something + # is very wrong assert f[0] != '/' current_ctx = context.memctx(hg_editor.repo, [parentctx.node(), revlog.nullid], @@ -697,6 +569,3 @@ def stupid_svn_server_pull_rev(ui, svn, ha = hg_editor.repo.commitctx(current_ctx) ui.status('Marked branch %s as closed.\n' % (b or 'default')) hg_editor._save_metadata() - -class BadPatchApply(Exception): - pass diff --git a/svncommand.py b/svncommand.py deleted file mode 100644 --- a/svncommand.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -import stat -import sys -import traceback - -from mercurial import hg -from mercurial import node -from mercurial import util as merc_util - -import svnwrap -import util -from util import register_subcommand, svn_subcommands, generate_help, svn_commands_nourl -# dirty trick to force demandimport to run my decorator anyway. 
-from utility_commands import print_wc_url -from fetch_command import fetch_revisions -from push_cmd import commit_from_rev -from diff_cmd import diff_command -from rebuildmeta import rebuildmeta -# shut up, pyflakes, we must import those -__x = [print_wc_url, fetch_revisions, commit_from_rev, diff_command, rebuildmeta] - -mode755 = (stat.S_IXUSR | stat.S_IXGRP| stat.S_IXOTH | stat.S_IRUSR | - stat.S_IRGRP| stat.S_IROTH | stat.S_IWUSR) -mode644 = (stat.S_IRUSR | stat.S_IRGRP| stat.S_IROTH | stat.S_IWUSR) - - -def svncmd(ui, repo, subcommand, *args, **opts): - if subcommand not in svn_subcommands: - candidates = [] - for c in svn_subcommands: - if c.startswith(subcommand): - candidates.append(c) - if len(candidates) == 1: - subcommand = candidates[0] - path = os.path.dirname(repo.path) - try: - commandfunc = svn_subcommands[subcommand] - if commandfunc not in svn_commands_nourl and not opts['svn_url']: - opts['svn_url'] = open(os.path.join(repo.path, 'svn', 'url')).read() - return commandfunc(ui, args=args, - hg_repo_path=path, - repo=repo, - **opts) - except TypeError: - tb = traceback.extract_tb(sys.exc_info()[2]) - if len(tb) == 1: - ui.status('Bad arguments for subcommand %s\n' % subcommand) - else: - raise - except KeyError, e: - tb = traceback.extract_tb(sys.exc_info()[2]) - if len(tb) == 1: - ui.status('Unknown subcommand %s\n' % subcommand) - else: - raise - - -def help_command(ui, args=None, **opts): - """show help for a given subcommands or a help overview - """ - if args: - subcommand = args[0] - if subcommand not in svn_subcommands: - candidates = [] - for c in svn_subcommands: - if c.startswith(subcommand): - candidates.append(c) - if len(candidates) == 1: - subcommand = candidates[0] - elif len(candidates) > 1: - ui.status('Ambiguous command. Could have been:\n%s\n' % - ' '.join(candidates)) - return - doc = svn_subcommands[subcommand].__doc__ - if doc is None: - doc = "No documentation available for %s." 
% subcommand - ui.status(doc.strip(), '\n') - return - ui.status(generate_help()) -help_command = register_subcommand('help')(help_command) - -def update(ui, args, repo, clean=False, **opts): - """update to a specified Subversion revision number - """ - assert len(args) == 1 - rev = int(args[0]) - path = os.path.join(repo.path, 'svn', 'rev_map') - answers = [] - for k,v in util.parse_revmap(path).iteritems(): - if k[0] == rev: - answers.append((v, k[1])) - if len(answers) == 1: - if clean: - return hg.clean(repo, answers[0][0]) - return hg.update(repo, answers[0][0]) - elif len(answers) == 0: - ui.status('Revision %s did not produce an hg revision.\n' % rev) - return 1 - else: - ui.status('Ambiguous revision!\n') - ui.status('\n'.join(['%s on %s' % (node.hex(a[0]), a[1]) for a in - answers]+[''])) - return 1 -update = register_subcommand('up')(update) diff --git a/svncommands.py b/svncommands.py new file mode 100644 --- /dev/null +++ b/svncommands.py @@ -0,0 +1,245 @@ +import os +import cPickle as pickle + +from mercurial import hg +from mercurial import node +from mercurial import util as hgutil + + +import hg_delta_editor +import svnwrap +import util +import utility_commands +import svnexternals + + +def incoming(ui, svn_url, hg_repo_path, skipto_rev=0, stupid=None, + tag_locations='tags', authors=None, filemap=None, **opts): + """show incoming revisions from Subversion + """ + svn_url = util.normalize_url(svn_url) + + initializing_repo = False + user, passwd = util.getuserpass(opts) + svn = svnwrap.SubversionRepo(svn_url, user, passwd) + author_host = "@%s" % svn.uuid + tag_locations = tag_locations.split(',') + hg_editor = hg_delta_editor.HgChangeReceiver(hg_repo_path, + ui_=ui, + subdir=svn.subdir, + author_host=author_host, + tag_locations=tag_locations, + authors=authors, + filemap=filemap) + if os.path.exists(hg_editor.uuid_file): + uuid = open(hg_editor.uuid_file).read() + assert uuid == svn.uuid + start = hg_editor.last_known_revision() + else: + open(hg_editor.uuid_file, 'w').write(svn.uuid) + open(hg_editor.svn_url_file, 'w').write(svn_url) + initializing_repo = True + start = skipto_rev + + if initializing_repo and start > 0: + raise hgutil.Abort('Revision skipping at repository initialization ' + 'remains unimplemented.') + + rev_stuff = (('revision', 'revnum'), + ('user', 'author'), + ('date', 'date'), + ('message', 'message') + ) + + ui.status('incoming changes from %s\n' % svn_url) + + for r in svn.revisions(start=start): + ui.status('\n') + for label, attr in rev_stuff: + l1 = label+':' + ui.status('%s%s\n' % (l1.ljust(13), + str(r.__getattribute__(attr)).strip(), )) + + +def rebuildmeta(ui, repo, hg_repo_path, args, **opts): + """rebuild hgsubversion metadata using values stored in revisions + """ + if len(args) != 1: + raise hgutil.Abort('You must pass the svn URI used to create this repo.') + uuid = None + url = args[0].rstrip('/') + user, passwd = util.getuserpass(opts) + svn = svnwrap.SubversionRepo(url, user, passwd) + subdir = svn.subdir + svnmetadir = os.path.join(repo.path, 'svn') + if not os.path.exists(svnmetadir): + os.makedirs(svnmetadir) + + revmap = open(os.path.join(svnmetadir, 'rev_map'), 'w') + revmap.write('1\n') + last_rev = -1 + branchinfo = {} + noderevnums = {} + for rev in repo: + ctx = repo[rev] + convinfo = ctx.extra().get('convert_revision', None) + if convinfo: + assert convinfo.startswith('svn:') + revpath, revision = convinfo[40:].split('@') + if subdir and subdir[0] != '/': + subdir = '/' + subdir + if subdir and subdir[-1] == '/': + subdir 
= subdir[:-1] + assert revpath.startswith(subdir), ('That does not look like the ' + 'right location in the repo.') + if uuid is None: + uuid = convinfo[4:40] + assert uuid == svn.uuid, 'UUIDs did not match!' + urlfile = open(os.path.join(svnmetadir, 'url'), 'w') + urlfile.write(url) + urlfile.close() + uuidfile = open(os.path.join(svnmetadir, 'uuid'), 'w') + uuidfile.write(uuid) + uuidfile.close() + commitpath = revpath[len(subdir)+1:] + if commitpath.startswith('branches'): + commitpath = commitpath[len('branches/'):] + elif commitpath == 'trunk': + commitpath = '' + else: + assert False, 'Unhandled case in rebuildmeta' + revmap.write('%s %s %s\n' % (revision, + node.hex(ctx.node()), + commitpath)) + revision = int(revision) + noderevnums[ctx.node()] = revision + if revision > last_rev: + last_rev = revision + branch = ctx.branch() + if branch == 'default': + branch = None + if branch not in branchinfo: + parent = ctx.parents()[0] + if (parent.node() in noderevnums + and parent.branch() != ctx.branch()): + parentbranch = parent.branch() + if parentbranch == 'default': + parentbranch = None + else: + parentbranch = None + branchinfo[branch] = (parentbranch, + noderevnums.get(parent.node(), 0), + revision) + for c in ctx.children(): + if c.branch() == 'closed-branches': + if branch in branchinfo: + del branchinfo[branch] + branchinfofile = open(os.path.join(svnmetadir, 'branch_info'), 'w') + pickle.dump(branchinfo, branchinfofile) + branchinfofile.close() + + # now handle tags + tagsinfo = {} + realtags = svn.tags + tagsleft = realtags.items() + while tagsleft: + tag, tagparent = tagsleft.pop(0) + source, rev = tagparent + if source.startswith('tags/'): + src = source[len('tags/'):] + if src in tagsinfo: + tagsinfo[tag] = tagsinfo[src] + elif src in realtags: + if (realtags[src][1] <= last_rev + or realtags[src][0].startswith('tags/')): + tagsleft.append(src) + else: + older_tags = svn.tags_at_rev(rev) + newsrc, newrev = older_tags[src] + tagsleft.append((tag, (newsrc, newrev))) + continue + else: + # determine the branch + assert not source.startswith('tags/'), "Tags can't be tags of other tags." + if source.startswith('branches/'): + source = source[len('branches/'):] + elif source == 'trunk': + source = None + else: + source = '../' + source + if rev <= last_rev and (source or 'default') in repo.branchtags(): + tagsinfo[tag] = source, rev + + tagsinfofile = open(os.path.join(svnmetadir, 'tag_info'), 'w') + pickle.dump(tagsinfo, tagsinfofile) + tagsinfofile.close() + + +def help(ui, args=None, **opts): + """show help for a given subcommands or a help overview + """ + if args: + subcommand = args[0] + if subcommand not in table: + candidates = [] + for c in table: + if c.startswith(subcommand): + candidates.append(c) + if len(candidates) == 1: + subcommand = candidates[0] + elif len(candidates) > 1: + ui.status('Ambiguous command. Could have been:\n%s\n' % + ' '.join(candidates)) + return + doc = table[subcommand].__doc__ + if doc is None: + doc = "No documentation available for %s." 
% subcommand + ui.status(doc.strip(), '\n') + return + ui.status(_helpgen()) + + +def update(ui, args, repo, clean=False, **opts): + """update to a specified Subversion revision number + """ + assert len(args) == 1 + rev = int(args[0]) + path = os.path.join(repo.path, 'svn', 'rev_map') + answers = [] + for k,v in util.parse_revmap(path).iteritems(): + if k[0] == rev: + answers.append((v, k[1])) + if len(answers) == 1: + if clean: + return hg.clean(repo, answers[0][0]) + return hg.update(repo, answers[0][0]) + elif len(answers) == 0: + ui.status('Revision %s did not produce an hg revision.\n' % rev) + return 1 + else: + ui.status('Ambiguous revision!\n') + ui.status('\n'.join(['%s on %s' % (node.hex(a[0]), a[1]) for a in + answers]+[''])) + return 1 + + +nourl = ['rebuildmeta', 'help'] + utility_commands.nourl +table = { + 'update': update, + 'help': help, + 'rebuildmeta': rebuildmeta, + 'incoming': incoming, + 'updateexternals': svnexternals.updateexternals, +} + +table.update(utility_commands.table) + + +def _helpgen(): + ret = ['hg svn ...', '', + 'subcommands for Subversion integration', '', + 'list of subcommands:', ''] + for name, func in sorted(table.items()): + short_description = (func.__doc__ or '').splitlines()[0] + ret.append(" %-10s %s" % (name, short_description)) + return '\n'.join(ret) + '\n' diff --git a/svnexternals.py b/svnexternals.py --- a/svnexternals.py +++ b/svnexternals.py @@ -1,6 +1,8 @@ import cStringIO -from mercurial import util as merc_util +import os, re, shutil, stat, subprocess +from mercurial import util as hgutil +from mercurial.i18n import _ class externalsfile(dict): """Map svn directories to lists of externals entries. @@ -24,7 +26,7 @@ class externalsfile(dict): def write(self): fp = cStringIO.StringIO() - for target in merc_util.sort(self): + for target in sorted(self): lines = self[target] if not lines: continue @@ -47,7 +49,7 @@ class externalsfile(dict): if line.startswith('['): line = line.strip() if line[-1] != ']': - raise merc_util.Abort('invalid externals section name: %s' % line) + raise hgutil.Abort('invalid externals section name: %s' % line) target = line[1:-1] if target == '.': target = '' @@ -56,7 +58,7 @@ class externalsfile(dict): if target is None or not line: continue self.setdefault(target, []).append(line[1:]) - + def diff(ext1, ext2): """Compare 2 externalsfile and yield tuples like (dir, value1, value2) where value1 is the external value in ext1 for dir or None, and @@ -70,3 +72,203 @@ def diff(ext1, ext2): for d in ext2: if d not in ext1: yield d, None, '\n'.join(ext2[d]) + +class BadDefinition(Exception): + pass + +re_defold = re.compile(r'^(.*?)\s+(?:-r\s*(\d+)\s+)?([a-zA-Z]+://.*)$') +re_defnew = re.compile(r'^(?:-r\s*(\d+)\s+)?((?:[a-zA-Z]+://|\^/).*)\s+(.*)$') +re_pegrev = re.compile(r'^(.*)@(\d+)$') +re_scheme = re.compile(r'^[a-zA-Z]+://') + +def parsedefinition(line): + """Parse an external definition line, return a tuple (path, rev, source) + or raise BadDefinition. + """ + # The parsing is probably not correct wrt path with whitespaces or + # potential quotes. svn documentation is not really talkative about + # these either. 
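For reference, the two ``svn:externals`` definition styles this parser has to accept are exercised later in ``tests/test_externals.py``; a minimal illustration of the intended ``(path, rev, source)`` results, using sample lines from the svn book as those tests do::

    from svnexternals import parsedefinition

    # old style: <path> [-r REV] <URL>
    parsedefinition('third-party/skins -r148 http://svn.example.com/skinproj')
    # -> ('third-party/skins', '148', 'http://svn.example.com/skinproj')

    # new style: [-r REV] <URL or ^/relative> <path>
    parsedefinition('-r 148 http://svn.example.com/skinproj third-party/skins')
    # -> ('third-party/skins', '148', 'http://svn.example.com/skinproj')

    # peg-revision form: <URL>@REV <path>
    parsedefinition('http://svn.example.com/skin-maker@21 third-party/skins/toolkit')
    # -> ('third-party/skins/toolkit', '21', 'http://svn.example.com/skin-maker')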
+ line = line.strip() + m = re_defnew.search(line) + if m: + rev, source, path = m.group(1, 2, 3) + else: + m = re_defold.search(line) + if not m: + raise BadDefinition() + path, rev, source = m.group(1, 2, 3) + # Look for peg revisions + m = re_pegrev.search(source) + if m: + source, rev = m.group(1, 2) + return (path, rev, source) + +def parsedefinitions(ui, repo, svnroot, exts): + """Return (targetdir, revision, source) tuples. Fail if nested + targetdirs are detected. source is an svn project URL. + """ + defs = [] + for base in sorted(exts): + for line in exts[base]: + try: + path, rev, source = parsedefinition(line) + except BadDefinition: + ui.warn(_('ignoring invalid external definition: %r' % line)) + continue + if re_scheme.search(source): + pass + elif source.startswith('^/'): + source = svnroot + source[1:] + else: + ui.warn(_('ignoring unsupported non-fully qualified external: %r' % source)) + continue + wpath = hgutil.pconvert(os.path.join(base, path)) + wpath = hgutil.canonpath(repo.root, '', wpath) + defs.append((wpath, rev, source)) + # Check target dirs are not nested + defs.sort() + for i, d in enumerate(defs): + for d2 in defs[i+1:]: + if d2[0].startswith(d[0] + '/'): + raise hgutil.Abort(_('external directories cannot nest:\n%s\n%s') + % (d[0], d2[0])) + return defs + +def computeactions(ui, repo, svnroot, ext1, ext2): + + def listdefs(data): + defs = {} + exts = externalsfile() + exts.read(data) + for d in parsedefinitions(ui, repo, svnroot, exts): + defs[d[0]] = d + return defs + + ext1 = listdefs(ext1) + ext2 = listdefs(ext2) + for wp1 in ext1: + if wp1 in ext2: + yield 'u', ext2[wp1] + else: + yield 'd', ext1[wp1] + for wp2 in ext2: + if wp2 not in ext1: + yield 'u', ext2[wp2] + +def getsvninfo(svnurl): + """Return a tuple (url, root) for supplied svn URL or working + directory path. 
+ """ + # Yes, this is ugly, but good enough for now + args = ['svn', 'info', '--xml', svnurl] + shell = os.name == 'nt' + p = subprocess.Popen(args, stdout=subprocess.PIPE, shell=shell) + stdout = p.communicate()[0] + if p.returncode: + raise hgutil.Abort(_('cannot get information about %s') + % svnurl) + m = re.search(r'(.*)', stdout, re.S) + if not m: + raise hgutil.Abort(_('cannot find SVN repository root from %s') + % svnurl) + root = m.group(1).rstrip('/') + + m = re.search(r'(.*)', stdout, re.S) + if not m: + raise hgutil.Abort(_('cannot find SVN repository URL from %s') % svnurl) + url = m.group(1) + + m = re.search(r']+revision="([^"]+)"', stdout, re.S) + if not m: + raise hgutil.Abort(_('cannot find SVN revision from %s') % svnurl) + rev = m.group(1) + return url, root, rev + +class externalsupdater: + def __init__(self, ui, repo): + self.repo = repo + self.ui = ui + + def update(self, wpath, rev, source): + path = self.repo.wjoin(wpath) + revspec = [] + if rev: + revspec = ['-r', rev] + if os.path.isdir(path): + exturl, extroot, extrev = getsvninfo(path) + if source == exturl: + if extrev != rev: + self.ui.status(_('updating external on %s@%s\n') % + (wpath, rev or 'HEAD')) + cwd = os.path.join(self.repo.root, path) + self.svn(['update'] + revspec, cwd) + return + self.delete(wpath) + cwd, dest = os.path.split(path) + cwd = os.path.join(self.repo.root, cwd) + if not os.path.isdir(cwd): + os.makedirs(cwd) + self.ui.status(_('fetching external %s@%s\n') % (wpath, rev or 'HEAD')) + self.svn(['co'] + revspec + [source, dest], cwd) + + def delete(self, wpath): + path = self.repo.wjoin(wpath) + if os.path.isdir(path): + self.ui.status(_('removing external %s\n') % wpath) + + def onerror(function, path, excinfo): + if function is not os.remove: + raise + # read-only files cannot be unlinked under Windows + s = os.stat(path) + if (s.st_mode & stat.S_IWRITE) != 0: + raise + os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE) + os.remove(path) + + shutil.rmtree(path, onerror=onerror) + return 1 + + def svn(self, args, cwd): + args = ['svn'] + args + self.ui.debug(_('updating externals: %r, cwd=%s\n') % (args, cwd)) + shell = os.name == 'nt' + subprocess.check_call(args, cwd=cwd, shell=shell) + +def updateexternals(ui, args, repo, **opts): + """update repository externals + """ + if len(args) > 1: + raise hgutil.Abort(_('updateexternals expects at most one changeset')) + node = None + if args: + node = args[0] + + try: + svnurl = file(repo.join('svn/url'), 'rb').read() + except: + raise hgutil.Abort(_('failed to retrieve original svn URL')) + svnroot = getsvninfo(svnurl)[1] + + # Retrieve current externals status + try: + oldext = file(repo.join('svn/externals'), 'rb').read() + except IOError: + oldext = '' + newext = '' + ctx = repo[node] + if '.hgsvnexternals' in ctx: + newext = ctx['.hgsvnexternals'].data() + + updater = externalsupdater(ui, repo) + actions = computeactions(ui, repo, svnroot, oldext, newext) + for action, ext in actions: + if action == 'u': + updater.update(ext[0], ext[1], ext[2]) + elif action == 'd': + updater.delete(ext[0]) + else: + raise hgutil.Abort(_('unknown update actions: %r') % action) + + file(repo.join('svn/externals'), 'wb').write(newext) + diff --git a/svnwrap/svn_swig_wrapper.py b/svnwrap/svn_swig_wrapper.py --- a/svnwrap/svn_swig_wrapper.py +++ b/svnwrap/svn_swig_wrapper.py @@ -4,9 +4,10 @@ import os import shutil import sys import tempfile +import urlparse +import urllib import hashlib import collections -import gc from svn import client from svn 
import core @@ -70,7 +71,7 @@ def _create_auth_baton(pool): # Give the client context baton a suite of authentication # providers.h platform_specific = ['svn_auth_get_gnome_keyring_simple_provider', - 'svn_auth_get_gnome_keyring_ssl_client_cert_pw_provider', + 'svn_auth_get_gnome_keyring_ssl_client_cert_pw_provider', 'svn_auth_get_keychain_simple_provider', 'svn_auth_get_keychain_ssl_client_cert_pw_provider', 'svn_auth_get_kwallet_simple_provider', @@ -81,13 +82,23 @@ def _create_auth_baton(pool): ] providers = [] - - for p in platform_specific: - if hasattr(core, p): - try: - providers.append(getattr(core, p)()) - except RuntimeError: - pass + # Platform-dependant authentication methods + getprovider = getattr(core, 'svn_auth_get_platform_specific_provider', + None) + if getprovider: + # Available in svn >= 1.6 + for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'): + for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'): + p = getprovider(name, type, pool) + if p: + providers.append(p) + else: + for p in platform_specific: + if hasattr(core, p): + try: + providers.append(getattr(core, p)()) + except RuntimeError: + pass providers += [ client.get_simple_provider(), @@ -100,6 +111,20 @@ def _create_auth_baton(pool): return core.svn_auth_open(providers, pool) +def parse_url(url): + """Parse a URL and return a tuple (username, password, url) + """ + scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) + user, passwd = None, None + if '@' in netloc: + userpass, netloc = netloc.split('@') + if ':' in userpass: + user, passwd = userpass.split(':') + user, passwd = urllib.unquote(user) or None, urllib.unquote(passwd) or None + else: + user = urllib.unquote(userpass) or None + url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) + return (user, passwd, url) class Revision(tuple): """Wrapper for a Subversion revision. @@ -148,9 +173,12 @@ class SubversionRepo(object): This uses the SWIG Python bindings, and will only work on svn >= 1.4. It takes a required param, the URL. """ - def __init__(self, url='', username='', head=None): - self.svn_url = url - self.uname = username + def __init__(self, url='', username='', password='', head=None): + parsed = parse_url(url) + # --username and --password override URL credentials + self.username = username or parsed[0] + self.password = password or parsed[1] + self.svn_url = parsed[2] self.auth_baton_pool = core.Pool() self.auth_baton = _create_auth_baton(self.auth_baton_pool) # self.init_ra_and_client() assumes that a pool already exists @@ -169,26 +197,16 @@ class SubversionRepo(object): """Initializes the RA and client layers, because sometimes getting unified diffs runs the remote server out of open files. """ - # Debugging code; retained for possible later use. - if False: - gc.collect() - import pympler.muppy.tracker - try: - self.memory_tracker - try: - self.memory_tracker.print_diff(self.memory_base) - except: - print 'HOP' - self.memory_base = self.memory_tracker.create_summary() - except: - print 'HEP' - self.memory_tracker = pympler.muppy.tracker.SummaryTracker() - - # while we're in here we'll recreate our pool, but first, we clear it - # and destroy it to make possible leaks cause fatal errors. 
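Credentials embedded in a URL are split off before the URL reaches the RA layer; the behaviour ``parse_url()`` is expected to have, with values taken from the new ``tests/test_urls.py``, is roughly::

    from svnwrap.svn_swig_wrapper import parse_url

    parse_url('file:///var/svn/repo')
    # -> (None, None, 'file:///var/svn/repo')
    parse_url('https://joe@svn.testurl.com/repo')
    # -> ('joe', None, 'https://svn.testurl.com/repo')
    parse_url('svn+ssh://:t3stpw@svn.testurl.com/repo')
    # -> (None, 't3stpw', 'svn+ssh://svn.testurl.com/repo')
    parse_url('https://joe:t3stpw@svn.testurl.com/repo')
    # -> ('joe', 't3stpw', 'https://svn.testurl.com/repo')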
- self.pool.clear() - self.pool.destroy() + # while we're in here we'll recreate our pool self.pool = core.Pool() + if self.username: + core.svn_auth_set_parameter(self.auth_baton, + core.SVN_AUTH_PARAM_DEFAULT_USERNAME, + self.username) + if self.password: + core.svn_auth_set_parameter(self.auth_baton, + core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, + self.password) self.client_context = client.create_context() self.client_context.auth_baton = self.auth_baton @@ -298,6 +316,8 @@ class SubversionRepo(object): source = hist.paths[path].copyfrom_path source_rev = 0 for p in hist.paths: + if not p.startswith(path): + continue if hist.paths[p].copyfrom_rev: # We assume that the revision of the source tree as it was # copied was actually the revision of the highest revision @@ -334,13 +354,7 @@ class SubversionRepo(object): The reason this is lazy is so that you can use the same repo object to perform RA calls to get deltas. """ - # NB: you'd think this would work, but you'd be wrong. I'm pretty - # convinced there must be some kind of svn bug here. - #return self.fetch_history_at_paths(['tags', 'trunk', 'branches'], - # start=start) - # However, we no longer need such filtering, as we gracefully handle - # branches located at arbitrary locations. - return self.fetch_history_at_paths([''], start=start, stop=stop, + return self.fetch_history_at_paths([''], start=start, chunk_size=chunk_size) def fetch_history_at_paths(self, paths, start=None, stop=None, @@ -412,7 +426,6 @@ class SubversionRepo(object): checksum = [] # internal dir batons can fall out of scope and get GCed before svn is # done with them. This prevents that (credit to gvn for the idea). - # TODO: verify that these are not the cause of our leaks batons = [edit_baton, ] def driver_cb(parent, path, pool): if not parent: @@ -439,14 +452,10 @@ class SubversionRepo(object): if action == 'modify': baton = editor.open_file(path, parent, base_revision, pool) elif action == 'add': - try: - frompath, fromrev = copies.get(path, (None, -1)) - if frompath: - frompath = self.svn_url + '/' + frompath - baton = editor.add_file(path, parent, frompath, fromrev, pool) - except (core.SubversionException, TypeError), e: #pragma: no cover - print e - raise + frompath, fromrev = copies.get(path, (None, -1)) + if frompath: + frompath = self.svn_url + '/' + frompath + baton = editor.add_file(path, parent, frompath, fromrev, pool) elif action == 'delete': baton = editor.delete_entry(path, base_revision, parent, pool) compute_delta = False @@ -475,11 +484,14 @@ class SubversionRepo(object): editor.close_edit(edit_baton, self.pool) def get_replay(self, revision, editor, oldest_rev_i_have=0): + # this method has a tendency to chew through RAM if you don't re-init + self.init_ra_and_client() e_ptr, e_baton = delta.make_editor(editor) try: ra.replay(self.ra, revision, oldest_rev_i_have, True, e_ptr, e_baton, self.pool) except core.SubversionException, e: #pragma: no cover + # can I depend on this number being constant? 
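``get_replay()`` raises ``SubversionRepoCanNotReplay`` when the server cannot satisfy replay requests, and callers are then expected to fall back to the diff-based ('stupid') path. A rough sketch of that calling pattern; the function names here are hypothetical, not the extension's actual entry points::

    from svnwrap.svn_swig_wrapper import SubversionRepoCanNotReplay

    def convert_revision(svn_repo, editor, revnum, stupid=False):
        """Apply one svn revision, preferring replay when the server allows it."""
        if not stupid:
            try:
                svn_repo.get_replay(revnum, editor)
                return
            except SubversionRepoCanNotReplay:
                pass  # e.g. servers older than 1.4; fall through to diffs
        apply_revision_with_diffs(svn_repo, editor, revnum)  # hypothetical helper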
if (e.apr_err == core.SVN_ERR_RA_NOT_IMPLEMENTED or e.apr_err == core.SVN_ERR_UNSUPPORTED_FEATURE): raise SubversionRepoCanNotReplay, ('This Subversion server ' @@ -493,6 +505,9 @@ class SubversionRepo(object): """ if not self.hasdiff3: raise SubversionRepoCanNotDiff() + # works around an svn server keeping too many open files (observed + # in an svnserve from the 1.2 era) + self.init_ra_and_client() assert path[0] != '/' url = self.svn_url + '/' + path @@ -556,7 +571,7 @@ class SubversionRepo(object): notfound = (core.SVN_ERR_FS_NOT_FOUND, core.SVN_ERR_RA_DAV_PATH_NOT_FOUND) if e.apr_err in notfound: # File not found - raise IOError, e.args[0] + raise IOError() raise if mode == 'l': linkprefix = "link " @@ -598,11 +613,11 @@ class SubversionRepo(object): revision. """ dirpath = dirpath.strip('/') + pool = core.Pool() rpath = '/'.join([self.svn_url, dirpath]).strip('/') rev = optrev(revision) try: - entries = client.ls(rpath, rev, True, self.client_context, - self.pool) + entries = client.ls(rpath, rev, True, self.client_context, pool) except core.SubversionException, e: if e.apr_err == core.SVN_ERR_FS_NOT_FOUND: raise IOError('%s cannot be found at r%d' % (dirpath, revision)) diff --git a/tests/comprehensive/test_stupid_pull.py b/tests/comprehensive/test_stupid_pull.py --- a/tests/comprehensive/test_stupid_pull.py +++ b/tests/comprehensive/test_stupid_pull.py @@ -6,7 +6,7 @@ from mercurial import hg from mercurial import ui from tests import test_util -import fetch_command +import wrappers def _do_case(self, name): @@ -18,10 +18,8 @@ def _do_case(self, name): checkout_path = self.repo_path if subdir: checkout_path += '/' + subdir - fetch_command.fetch_revisions(ui.ui(), - svn_url=test_util.fileurl(checkout_path), - hg_repo_path=wc2_path, - stupid=True) + wrappers.clone(None, ui.ui(), source=test_util.fileurl(checkout_path), + dest=wc2_path, stupid=True, noupdate=True) self.repo2 = hg.repository(ui.ui(), wc2_path) self.assertEqual(self.repo.branchtags(), self.repo2.branchtags()) self.assertEqual(pickle.load(open(os.path.join(self.wc_path, '.hg', 'svn', 'tag_info'))), diff --git a/tests/fixtures/branch_create_with_dir_delete.sh b/tests/fixtures/branch_create_with_dir_delete.sh new file mode 100755 --- /dev/null +++ b/tests/fixtures/branch_create_with_dir_delete.sh @@ -0,0 +1,37 @@ +#!/bin/sh +mkdir temp +cd temp +svnadmin create repo +svn co file://`pwd`/repo wc +cd wc +mkdir branches trunk tags +svn add * +svn ci -m 'btt' +cd trunk +for a in alpha beta gamma delta ; do + echo $a > $a + svn add $a +done +svn ci -m 'Add files.' +mkdir al +echo foo > al/foo +svn add al +svn ci -m 'add directory al to delete on the branch' +cd .. +svn up +svn cp trunk branches/dev_branch +svn rm branches/dev_branch/al +svn ci -m 'branch' +cd branches/dev_branch +svn rm delta +echo narf > alpha +echo iota > iota +svn add iota +svn ci -m 'branch changes' +cd ../../../.. +svnadmin dump temp/repo > branch_create_with_dir_delete.svndump +echo +echo 'Complete.' +echo 'You probably want to clean up temp now.' 
+echo 'Dump in branch_create_with_dir_delete.svndump' +exit 0 diff --git a/tests/fixtures/branch_create_with_dir_delete.svndump b/tests/fixtures/branch_create_with_dir_delete.svndump new file mode 100644 --- /dev/null +++ b/tests/fixtures/branch_create_with_dir_delete.svndump @@ -0,0 +1,237 @@ +SVN-fs-dump-format-version: 2 + +UUID: 88f76347-3f0e-4a36-b1e1-8dec7ad11590 + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2009-04-07T22:37:33.524401Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 102 +Content-length: 102 + +K 7 +svn:log +V 3 +btt +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-04-07T22:37:34.076074Z +PROPS-END + +Node-path: branches +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: tags +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: trunk +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Revision-number: 2 +Prop-content-length: 110 +Content-length: 110 + +K 7 +svn:log +V 10 +Add files. +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-04-07T22:37:35.119223Z +PROPS-END + +Node-path: trunk/alpha +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 9f9f90dbe3e5ee1218c86b8839db1995 +Content-length: 16 + +PROPS-END +alpha + + +Node-path: trunk/beta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 5 +Text-content-md5: f0cf2a92516045024a0c99147b28f05b +Content-length: 15 + +PROPS-END +beta + + +Node-path: trunk/delta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: d2840cc81bc032bd1141b56687d0f93c +Content-length: 16 + +PROPS-END +delta + + +Node-path: trunk/gamma +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 6 +Text-content-md5: 303febb9068384eca46b5b6516843b35 +Content-length: 16 + +PROPS-END +gamma + + +Revision-number: 3 +Prop-content-length: 140 +Content-length: 140 + +K 7 +svn:log +V 40 +add directory al to delete on the branch +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-04-07T22:37:36.075475Z +PROPS-END + +Node-path: trunk/al +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: trunk/al/foo +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 4 +Text-content-md5: d3b07384d113edec49eaa6238ad5ff00 +Content-length: 14 + +PROPS-END +foo + + +Revision-number: 4 +Prop-content-length: 105 +Content-length: 105 + +K 7 +svn:log +V 6 +branch +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-04-07T22:37:39.073788Z +PROPS-END + +Node-path: branches/dev_branch +Node-kind: dir +Node-action: add +Node-copyfrom-rev: 3 +Node-copyfrom-path: trunk + + +Node-path: branches/dev_branch/al +Node-action: delete + + +Revision-number: 5 +Prop-content-length: 114 +Content-length: 114 + +K 7 +svn:log +V 14 +branch changes +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-04-07T22:37:40.093715Z +PROPS-END + +Node-path: branches/dev_branch/alpha +Node-kind: file +Node-action: change +Text-content-length: 5 +Text-content-md5: 5e723ed52db2000686425ca28bc5ba4a +Content-length: 5 + +narf + + +Node-path: branches/dev_branch/iota +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 5 +Text-content-md5: ebcf3971120220589f1dfbf8d56e25b9 +Content-length: 15 + +PROPS-END +iota + + 
+Node-path: branches/dev_branch/delta +Node-action: delete + + diff --git a/tests/fixtures/empty_dir_in_trunk_not_repo_root.sh b/tests/fixtures/empty_dir_in_trunk_not_repo_root.sh new file mode 100755 --- /dev/null +++ b/tests/fixtures/empty_dir_in_trunk_not_repo_root.sh @@ -0,0 +1,18 @@ +#!/bin/sh +mkdir temp || exit 1 +cd temp +svnadmin create repo +svn co file://`pwd`/repo wc +pushd wc +mkdir -p project/trunk +svn add project +svn ci -m 'trunk' +cd project/trunk +echo a > a +mkdir narf +svn add a narf +svn ci -m 'file and empty dir' +popd +svnadmin dump repo > ../empty_dir_in_trunk_not_repo_root.svndump +echo 'dump in empty_dir_in_trunk_not_repo_root.svndump' +echo 'you can probably delete temp now' diff --git a/tests/fixtures/empty_dir_in_trunk_not_repo_root.svndump b/tests/fixtures/empty_dir_in_trunk_not_repo_root.svndump new file mode 100644 --- /dev/null +++ b/tests/fixtures/empty_dir_in_trunk_not_repo_root.svndump @@ -0,0 +1,89 @@ +SVN-fs-dump-format-version: 2 + +UUID: c866b883-4c03-404b-8609-dade481701a6 + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2009-04-15T03:39:30.544797Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 104 +Content-length: 104 + +K 7 +svn:log +V 5 +trunk +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-04-15T03:39:31.069518Z +PROPS-END + +Node-path: project +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: project/trunk +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Revision-number: 2 +Prop-content-length: 118 +Content-length: 118 + +K 7 +svn:log +V 18 +file and empty dir +K 10 +svn:author +V 5 +durin +K 8 +svn:date +V 27 +2009-04-15T03:39:32.069497Z +PROPS-END + +Node-path: project/trunk/a +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 2 +Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 +Content-length: 12 + +PROPS-END +a + + +Node-path: project/trunk/narf +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + diff --git a/tests/fixtures/test_no_dates.svndump b/tests/fixtures/test_no_dates.svndump new file mode 100644 --- /dev/null +++ b/tests/fixtures/test_no_dates.svndump @@ -0,0 +1,133 @@ +SVN-fs-dump-format-version: 2 + +UUID: 3b9ee152-ff20-493a-9d97-5d739715df90 + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2009-04-08T10:33:20.055686Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 55 +Content-length: 55 + +K 7 +svn:log +V 4 +init +K 10 +svn:author +V 3 +djc +PROPS-END + +Node-path: branches +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: tags +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: trunk +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Revision-number: 2 +Prop-content-length: 56 +Content-length: 56 + +K 7 +svn:log +V 5 +add a +K 10 +svn:author +V 3 +djc +PROPS-END + +Node-path: trunk/a +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 2 +Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 +Content-length: 12 + +PROPS-END +a + + +Revision-number: 3 +Prop-content-length: 98 +Content-length: 98 + +K 7 +svn:log +V 1 +b +K 10 +svn:author +V 3 +djc +K 8 +svn:date +V 27 +2009-04-08T10:35:21.843827Z +PROPS-END + +Node-path: trunk/a +Node-kind: file +Node-action: 
change +Text-content-length: 3 +Text-content-md5: daa8075d6ac5ff8d0c6d4650adb4ef29 +Content-length: 3 + +ab + + +Revision-number: 4 +Prop-content-length: 52 +Content-length: 52 + +K 7 +svn:log +V 1 +c +K 10 +svn:author +V 3 +djc +PROPS-END + +Node-path: trunk/a +Node-kind: file +Node-action: change +Text-content-length: 4 +Text-content-md5: 0bee89b07a248e27c83fc3d5951213c1 +Content-length: 4 + +abc + + diff --git a/tests/run.py b/tests/run.py --- a/tests/run.py +++ b/tests/run.py @@ -15,6 +15,7 @@ import test_fetch_mappings import test_fetch_renames import test_fetch_symlinks import test_fetch_truncated +import test_pull import test_push_command import test_push_renames import test_push_dirs @@ -22,6 +23,7 @@ import test_push_eol import test_rebuildmeta import test_tags import test_utility_commands +import test_urls def suite(): return unittest.TestSuite([test_binaryfiles.suite(), @@ -35,6 +37,7 @@ def suite(): test_fetch_renames.suite(), test_fetch_symlinks.suite(), test_fetch_truncated.suite(), + test_pull.suite(), test_push_command.suite(), test_push_renames.suite(), test_push_dirs.suite(), @@ -42,6 +45,7 @@ def suite(): test_rebuildmeta.suite(), test_tags.suite(), test_utility_commands.suite(), + test_urls.suite(), ]) if __name__ == '__main__': diff --git a/tests/test_diff.py b/tests/test_diff.py --- a/tests/test_diff.py +++ b/tests/test_diff.py @@ -2,7 +2,7 @@ import unittest from mercurial import ui -import diff_cmd +import wrappers import test_util @@ -32,7 +32,8 @@ class DiffTests(test_util.TestBase): ('alpha', 'alpha', 'alpha\n\nadded line\n'), ]) u = ui.ui() - diff_cmd.diff_command(u, self.repo, self.wc_path) + wrappers.diff(lambda x,y,z: None, + u, self.repo, svn=True) self.assertEqual(u.stream.getvalue(), expected_diff_output) diff --git a/tests/test_externals.py b/tests/test_externals.py --- a/tests/test_externals.py +++ b/tests/test_externals.py @@ -1,18 +1,18 @@ -import unittest +import os, unittest + +from mercurial import commands import svnexternals import test_util - class TestFetchExternals(test_util.TestBase): def test_externalsfile(self): f = svnexternals.externalsfile() f['t1'] = 'dir1 -r10 svn://foobar' f['t 2'] = 'dir2 -r10 svn://foobar' f['t3'] = ['dir31 -r10 svn://foobar', 'dir32 -r10 svn://foobar'] - - refext = """\ -[t 2] + + refext = """[t 2] dir2 -r10 svn://foobar [t1] dir1 -r10 svn://foobar @@ -29,55 +29,71 @@ class TestFetchExternals(test_util.TestB for t in f: self.assertEqual(f[t], f2[t]) + def test_parsedefinitions(self): + # Taken from svn book + samples = [ + ('third-party/sounds http://svn.example.com/repos/sounds', + ('third-party/sounds', None, 'http://svn.example.com/repos/sounds')), + ('third-party/skins -r148 http://svn.example.com/skinproj', + ('third-party/skins', '148', 'http://svn.example.com/skinproj')), + ('third-party/skins -r 148 http://svn.example.com/skinproj', + ('third-party/skins', '148', 'http://svn.example.com/skinproj')), + ('http://svn.example.com/repos/sounds third-party/sounds', + ('third-party/sounds', None, 'http://svn.example.com/repos/sounds')), + ('-r148 http://svn.example.com/skinproj third-party/skins', + ('third-party/skins', '148', 'http://svn.example.com/skinproj')), + ('-r 148 http://svn.example.com/skinproj third-party/skins', + ('third-party/skins', '148', 'http://svn.example.com/skinproj')), + ('http://svn.example.com/skin-maker@21 third-party/skins/toolkit', + ('third-party/skins/toolkit', '21', 'http://svn.example.com/skin-maker')), + ] + + for line, expected in samples: + self.assertEqual(expected, 
svnexternals.parsedefinition(line)) + def test_externals(self, stupid=False): repo = self._load_fixture_and_fetch('externals.svndump', stupid=stupid) - ref0 = """\ -[.] + ref0 = """[.] ^/externals/project1 deps/project1 """ self.assertEqual(ref0, repo[0]['.hgsvnexternals'].data()) - ref1 = """\ -[.] + ref1 = """[.] ^/externals/project1 deps/project1 ^/externals/project2 deps/project2 """ self.assertEqual(ref1, repo[1]['.hgsvnexternals'].data()) - ref2 = """\ -[.] + ref2 = """[.] ^/externals/project2 deps/project2 [subdir] ^/externals/project1 deps/project1 [subdir2] ^/externals/project1 deps/project1 """ - self.assertEqual(ref2, repo[2]['.hgsvnexternals'].data()) + actual = repo[2]['.hgsvnexternals'].data() + self.assertEqual(ref2, actual) - ref3 = """\ -[.] + ref3 = """[.] ^/externals/project2 deps/project2 [subdir] ^/externals/project1 deps/project1 """ self.assertEqual(ref3, repo[3]['.hgsvnexternals'].data()) - ref4 = """\ -[subdir] + ref4 = """[subdir] ^/externals/project1 deps/project1 """ self.assertEqual(ref4, repo[4]['.hgsvnexternals'].data()) - ref5 = """\ -[.] + ref5 = """[.] ^/externals/project2 deps/project2 [subdir2] ^/externals/project1 deps/project1 """ self.assertEqual(ref5, repo[5]['.hgsvnexternals'].data()) - ref6 = """\ -[.] + ref6 = """[.] ^/externals/project2 deps/project2 """ self.assertEqual(ref6, repo[6]['.hgsvnexternals'].data()) @@ -85,6 +101,29 @@ class TestFetchExternals(test_util.TestB def test_externals_stupid(self): self.test_externals(True) + def test_updateexternals(self): + def checkdeps(deps, nodeps, repo, rev=None): + svnexternals.updateexternals(ui, [rev], repo) + for d in deps: + p = os.path.join(repo.root, d) + self.assertTrue(os.path.isdir(p), + 'missing: %s@%r' % (d, rev)) + for d in nodeps: + p = os.path.join(repo.root, d) + self.assertTrue(not os.path.isdir(p), + 'unexpected: %s@%r' % (d, rev)) + + ui = test_util.ui.ui() + repo = self._load_fixture_and_fetch('externals.svndump', stupid=0) + commands.update(ui, repo) + checkdeps(['deps/project1'], [], repo, 0) + checkdeps(['deps/project1', 'deps/project2'], [], repo, 1) + checkdeps(['subdir/deps/project1', 'subdir2/deps/project1', + 'deps/project2'], + ['deps/project1'], repo, 2) + checkdeps(['subdir/deps/project1', 'deps/project2'], + ['subdir2/deps/project1'], repo, 3) + checkdeps(['subdir/deps/project1'], ['deps/project2'], repo, 4) class TestPushExternals(test_util.TestBase): def setUp(self): @@ -96,9 +135,8 @@ class TestPushExternals(test_util.TestBa def test_push_externals(self, stupid=False): # Add a new reference on an existing and non-existing directory changes = [ - ('.hgsvnexternals', '.hgsvnexternals', - """\ -[dir] + ('.hgsvnexternals', '.hgsvnexternals', + """[dir] ../externals/project2 deps/project2 [subdir1] ../externals/project1 deps/project1 @@ -115,9 +153,8 @@ class TestPushExternals(test_util.TestBa # Remove all references from one directory, add a new one # to the other (test multiline entries) changes = [ - ('.hgsvnexternals', '.hgsvnexternals', - """\ -[subdir1] + ('.hgsvnexternals', '.hgsvnexternals', + """[subdir1] ../externals/project1 deps/project1 ../externals/project2 deps/project2 """), diff --git a/tests/test_fetch_branches.py b/tests/test_fetch_branches.py --- a/tests/test_fetch_branches.py +++ b/tests/test_fetch_branches.py @@ -1,14 +1,24 @@ import unittest +from mercurial import hg from mercurial import node +from mercurial import ui import test_util +import wrappers class TestFetchBranches(test_util.TestBase): - def _load_fixture_and_fetch(self, fixture_name, 
stupid): + def _load_fixture_and_fetch(self, fixture_name, stupid, noupdate=True): return test_util.load_fixture_and_fetch(fixture_name, self.repo_path, - self.wc_path, stupid=stupid) + self.wc_path, stupid=stupid, + noupdate=noupdate) + + def _load_fixture_and_fetch_with_anchor(self, fixture_name, anchor): + test_util.load_svndump_fixture(self.repo_path, fixture_name) + source = '%s#%s' % (test_util.fileurl(self.repo_path), anchor) + wrappers.clone(None, ui.ui(), source=source, dest=self.wc_path) + return hg.repository(ui.ui(), self.wc_path) def test_unrelatedbranch(self, stupid=False): repo = self._load_fixture_and_fetch('unrelatedbranch.svndump', stupid) @@ -53,6 +63,27 @@ class TestFetchBranches(test_util.TestBa def test_replace_trunk_with_branch_stupid(self): self.test_replace_trunk_with_branch(stupid=True) + def test_branch_create_with_dir_delete_works(self, stupid=False): + repo = self._load_fixture_and_fetch('branch_create_with_dir_delete.svndump', + stupid) + self.assertEqual(repo['tip'].manifest().keys(), + ['alpha', 'beta', 'iota', 'gamma', ]) + + def test_branch_tip_update_to_default(self, stupid=False): + repo = self._load_fixture_and_fetch('unorderedbranch.svndump', + stupid, noupdate=False) + self.assertEqual(repo[None].branch(), 'default') + self.assertTrue('tip' not in repo[None].tags()) + + def test_branch_tip_update_to_default_stupid(self): + self.test_branch_tip_update_to_default(True) + + def test_branch_tip_update_to_branch_anchor(self): + repo = self._load_fixture_and_fetch_with_anchor( + 'unorderedbranch.svndump', 'branch') + self.assertEqual(repo[None].branch(), 'branch') + self.assertEqual(repo[None].parents()[0], repo[repo.branchheads()[0]]) + def suite(): all = [unittest.TestLoader().loadTestsFromTestCase(TestFetchBranches), ] diff --git a/tests/test_fetch_command.py b/tests/test_fetch_command.py --- a/tests/test_fetch_command.py +++ b/tests/test_fetch_command.py @@ -6,8 +6,14 @@ from mercurial import ui import test_util - class TestBasicRepoLayout(test_util.TestBase): + + def test_no_dates(self): + repo = self._load_fixture_and_fetch('test_no_dates.svndump') + local_epoch = repo[0].date() + self.assertEqual(local_epoch[0], local_epoch[1]) + self.assertEqual(repo[1].date(), repo[2].date()) + def test_fresh_fetch_single_rev(self): repo = self._load_fixture_and_fetch('single_rev.svndump') self.assertEqual(node.hex(repo['tip'].node()), @@ -18,7 +24,6 @@ class TestBasicRepoLayout(test_util.Test def test_fresh_fetch_two_revs(self): repo = self._load_fixture_and_fetch('two_revs.svndump') - # TODO there must be a better way than repo[0] for this check self.assertEqual(node.hex(repo[0].node()), '434ed487136c1b47c1e8f952edb4dc5a8e6328df') self.assertEqual(node.hex(repo['tip'].node()), @@ -27,7 +32,6 @@ class TestBasicRepoLayout(test_util.Test def test_branches(self): repo = self._load_fixture_and_fetch('simple_branch.svndump') - # TODO there must be a better way than repo[0] for this check self.assertEqual(node.hex(repo[0].node()), '434ed487136c1b47c1e8f952edb4dc5a8e6328df') self.assertEqual(node.hex(repo['tip'].node()), @@ -42,7 +46,6 @@ class TestBasicRepoLayout(test_util.Test def test_two_branches_with_heads(self): repo = self._load_fixture_and_fetch('two_heads.svndump') - # TODO there must be a better way than repo[0] for this check self.assertEqual(node.hex(repo[0].node()), '434ed487136c1b47c1e8f952edb4dc5a8e6328df') self.assertEqual(node.hex(repo['tip'].node()), @@ -57,14 +60,12 @@ class TestBasicRepoLayout(test_util.Test def test_many_special_cases_replay(self): 
repo = self._load_fixture_and_fetch('many_special_cases.svndump') - # TODO there must be a better way than repo[0] for this check self._many_special_cases_checks(repo) def test_many_special_cases_diff(self): repo = self._load_fixture_and_fetch('many_special_cases.svndump', stupid=True) - # TODO there must be a better way than repo[0] for this check self._many_special_cases_checks(repo) def _many_special_cases_checks(self, repo): @@ -85,9 +86,7 @@ class TestBasicRepoLayout(test_util.Test self.assertEqual(node.hex(repo['default'].node()), '434ed487136c1b47c1e8f952edb4dc5a8e6328df') assert 'README' not in repo - self.assertEqual(repo['tip'].branch(), - '../branches') - + assert '../branches' not in repo def test_files_copied_from_outside_btt(self): repo = self._load_fixture_and_fetch( @@ -99,7 +98,7 @@ class TestBasicRepoLayout(test_util.Test def test_file_renamed_in_from_outside_btt(self): repo = self._load_fixture_and_fetch( 'file_renamed_in_from_outside_btt.svndump') - self.assert_('LICENSE.file' in repo['tip']) + self.assert_('LICENSE.file' in repo['default']) def test_renamed_dir_in_from_outside_btt_not_repo_root(self): repo = self._load_fixture_and_fetch( @@ -141,9 +140,6 @@ class TestBasicRepoLayout(test_util.Test def test_fetch_when_trunk_has_no_files(self, stupid=False): repo = self._load_fixture_and_fetch('file_not_in_trunk_root.svndump', stupid=stupid) - print repo['tip'].branch() - print repo['tip'] - print repo['tip'].files() self.assertEqual(repo['tip'].branch(), 'default') def test_fetch_when_trunk_has_no_files_stupid(self): @@ -155,7 +151,6 @@ class TestStupidPull(test_util.TestBase) self.repo_path, self.wc_path, True) - # TODO there must be a better way than repo[0] for this check self.assertEqual(node.hex(repo[0].node()), '434ed487136c1b47c1e8f952edb4dc5a8e6328df') self.assertEqual(node.hex(repo['tip'].node()), diff --git a/tests/test_fetch_command_regexes.py b/tests/test_fetch_command_regexes.py --- a/tests/test_fetch_command_regexes.py +++ b/tests/test_fetch_command_regexes.py @@ -1,4 +1,4 @@ -import fetch_command +import stupid import unittest two_empties = """Index: __init__.py @@ -46,27 +46,27 @@ Name: svn:special class RegexTests(unittest.TestCase): def test_empty_file_re(self): - matches = fetch_command.empty_file_patch_wont_make_re.findall(two_empties) + matches = stupid.empty_file_patch_wont_make_re.findall(two_empties) assert sorted(matches) == ['__init__.py', 'bar/__init__.py'] def test_any_matches_just_one(self): pat = '''Index: trunk/django/contrib/admin/urls/__init__.py =================================================================== ''' - matches = fetch_command.any_file_re.findall(pat) + matches = stupid.any_file_re.findall(pat) assert len(matches) == 1 def test_special_re(self): - matches = fetch_command.property_special_set_re.findall(special_delta) + matches = stupid.property_special_set_re.findall(special_delta) assert len(matches) == 1 def test_any_file_re(self): - matches = fetch_command.any_file_re.findall(two_empties) + matches = stupid.any_file_re.findall(two_empties) assert sorted(matches) == ['__init__.py', 'bar/__init__.py', 'bar/test_muhaha.py'] def test_binary_file_re(self): - matches = fetch_command.binary_file_re.findall(binary_delta) + matches = stupid.binary_file_re.findall(binary_delta) assert matches == ['trunk/functional_tests/doc_tests/test_doctest_fixtures/doctest_fixtures_fixtures.pyc'] def suite(): diff --git a/tests/test_fetch_mappings.py b/tests/test_fetch_mappings.py --- a/tests/test_fetch_mappings.py +++ 
b/tests/test_fetch_mappings.py @@ -7,7 +7,7 @@ from mercurial import ui from mercurial import node import test_util -import fetch_command +import wrappers class MapTests(test_util.TestBase): @property @@ -23,11 +23,8 @@ class MapTests(test_util.TestBase): authormap = open(self.authors, 'w') authormap.write("Augie=Augie Fackler \n") authormap.close() - fetch_command.fetch_revisions(ui.ui(), - svn_url=test_util.fileurl(self.repo_path), - hg_repo_path=self.wc_path, - stupid=stupid, - authors=self.authors) + wrappers.clone(None, ui.ui(), source=test_util.fileurl(self.repo_path), + dest=self.wc_path, stupid=stupid, svn_authors=self.authors) self.assertEqual(self.repo[0].user(), 'Augie Fackler ') self.assertEqual(self.repo['tip'].user(), @@ -41,11 +38,9 @@ class MapTests(test_util.TestBase): authormap = open(self.authors, 'w') authormap.write("evil=Testy ") authormap.close() - fetch_command.fetch_revisions(ui.ui(), - svn_url=test_util.fileurl(self.repo_path), - hg_repo_path=self.wc_path, - stupid=stupid, - authors=self.authors) + wrappers.clone(None, ui.ui(), source=test_util.fileurl(self.repo_path), + dest=self.wc_path, stupid=stupid, + svn_authors=self.authors) self.assertEqual(self.repo[0].user(), 'Augie@5b65bade-98f3-4993-a01f-b7a6710da339') self.assertEqual(self.repo['tip'].user(), @@ -59,11 +54,9 @@ class MapTests(test_util.TestBase): filemap = open(self.filemap, 'w') filemap.write("include alpha\n") filemap.close() - fetch_command.fetch_revisions(ui.ui(), - svn_url=test_util.fileurl(self.repo_path), - hg_repo_path=self.wc_path, - stupid=stupid, - filemap=self.filemap) + wrappers.clone(None, ui.ui(), source=test_util.fileurl(self.repo_path), + dest=self.wc_path, stupid=stupid, + svn_filemap=self.filemap) self.assertEqual(node.hex(self.repo[0].node()), '88e2c7492d83e4bf30fbb2dcbf6aa24d60ac688d') self.assertEqual(node.hex(self.repo['default'].node()), 'e524296152246b3837fe9503c83b727075835155') @@ -75,11 +68,9 @@ class MapTests(test_util.TestBase): filemap = open(self.filemap, 'w') filemap.write("exclude alpha\n") filemap.close() - fetch_command.fetch_revisions(ui.ui(), - svn_url=test_util.fileurl(self.repo_path), - hg_repo_path=self.wc_path, - stupid=stupid, - filemap=self.filemap) + wrappers.clone(None, ui.ui(), source=test_util.fileurl(self.repo_path), + dest=self.wc_path, stupid=stupid, + svn_filemap=self.filemap) self.assertEqual(node.hex(self.repo[0].node()), '2c48f3525926ab6c8b8424bcf5eb34b149b61841') self.assertEqual(node.hex(self.repo['default'].node()), 'b37a3c0297b71f989064d9b545b5a478bbed7cc1') diff --git a/tests/test_fetch_truncated.py b/tests/test_fetch_truncated.py --- a/tests/test_fetch_truncated.py +++ b/tests/test_fetch_truncated.py @@ -3,7 +3,7 @@ import unittest from mercurial import hg from mercurial import ui -import fetch_command +import wrappers import test_util class TestFetchTruncatedHistory(test_util.TestBase): @@ -11,10 +11,9 @@ class TestFetchTruncatedHistory(test_uti # Test repository does not follow the usual layout test_util.load_svndump_fixture(self.repo_path, 'truncatedhistory.svndump') svn_url = test_util.fileurl(self.repo_path + '/project2') - fetch_command.fetch_revisions(ui.ui(), - svn_url=svn_url, - hg_repo_path=self.wc_path, - stupid=stupid) + wrappers.clone(None, ui.ui(), source=svn_url, + dest=self.wc_path, stupid=stupid, + noupdate=True) repo = hg.repository(ui.ui(), self.wc_path) # We are converting /project2/trunk coming from: diff --git a/tests/test_pull.py b/tests/test_pull.py new file mode 100644 --- /dev/null +++ b/tests/test_pull.py @@ -0,0 
+1,72 @@ +import test_util + +import os.path +import subprocess +from mercurial import ui + +import wrappers + + +class TestPull(test_util.TestBase): + def setUp(self): + super(TestPull, self).setUp() + self.svn_wc = None + + def _load_fixture_and_fetch(self, fixture_name): + return test_util.load_fixture_and_fetch(fixture_name, self.repo_path, + self.wc_path, stupid=False, + noupdate=False) + + def _add_svn_rev(self, changes): + # changes is a dict of filename -> contents + if self.svn_wc is None: + self.svn_wc = os.path.join(self.tmpdir, 'testsvn_wc') + subprocess.call([ + 'svn', 'co', '-q', test_util.fileurl(self.repo_path), + self.svn_wc + ]) + + for filename, contents in changes.iteritems(): + # filenames are / separated + filename = filename.replace('/', os.path.sep) + filename = os.path.join(self.svn_wc, filename) + open(filename, 'w').write(contents) + subprocess.call(['svn', 'add', '-q', filename]) # may be redundant + subprocess.call([ + 'svn', 'commit', '-q', self.svn_wc, '-m', 'test changes']) + + def test_nochanges(self): + repo = self._load_fixture_and_fetch('single_rev.svndump') + state = repo.parents() + wrappers.pull(None, ui.ui(), repo) + self.assertEqual(state, repo.parents()) + + def test_onerevision_noupdate(self): + repo = self._load_fixture_and_fetch('single_rev.svndump') + state = repo.parents() + self._add_svn_rev({'trunk/alpha': 'Changed'}) + wrappers.pull(None, ui.ui(), repo) + self.assertEqual(state, repo.parents()) + self.assertTrue('tip' not in repo[None].tags()) + + def test_onerevision_doupdate(self): + repo = self._load_fixture_and_fetch('single_rev.svndump') + state = repo.parents() + self._add_svn_rev({'trunk/alpha': 'Changed'}) + wrappers.pull(None, ui.ui(), repo, update=True) + self.failIfEqual(state, repo.parents()) + self.assertTrue('tip' in repo[None].tags()) + + def test_onerevision_divergent(self): + repo = self._load_fixture_and_fetch('single_rev.svndump') + self.commitchanges((('alpha', 'alpha', 'Changed another way'),)) + state = repo.parents() + self._add_svn_rev({'trunk/alpha': 'Changed one way'}) + wrappers.pull(None, ui.ui(), repo, update=True) + self.assertEqual(state, repo.parents()) + self.assertTrue('tip' not in repo[None].tags()) + self.assertEqual(len(repo.heads()), 2) + +def suite(): + import unittest, sys + return unittest.findTestCases(sys.modules[__name__]) diff --git a/tests/test_push_command.py b/tests/test_push_command.py --- a/tests/test_push_command.py +++ b/tests/test_push_command.py @@ -8,9 +8,9 @@ from mercurial import hg from mercurial import node from mercurial import ui from mercurial import revlog +from mercurial import util as hgutil -import fetch_command -import push_cmd +import wrappers import test_util import time @@ -38,9 +38,8 @@ class PushOverSvnserveTests(test_util.Te args = ['svnserve', '-d', '--foreground', '-r', self.repo_path] self.svnserve_pid = subprocess.Popen(args).pid time.sleep(2) - fetch_command.fetch_revisions(ui.ui(), - svn_url='svn://localhost/', - hg_repo_path=self.wc_path) + wrappers.clone(None, ui.ui(), source='svn://localhost/', + dest=self.wc_path, noupdate=True) def tearDown(self): os.system('kill -9 %d' % self.svnserve_pid) @@ -70,10 +69,10 @@ class PushOverSvnserveTests(test_util.Te if not commit: return # some tests use this test as an extended setup. 
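The pull tests above pin down the update semantics of the wrapped ``pull``: without ``update`` the working directory stays where it was, and even with ``update`` it is left alone when local commits have diverged. Mirroring the call signatures used in these tests, a divergent pull might be reconciled roughly like this (a sketch, not part of the patch; ``repo`` is assumed to be an already-converted repository object)::

    from hgext import rebase as hgrebase
    from mercurial import ui as uimod
    import wrappers

    u = uimod.ui()
    wrappers.pull(None, u, repo, update=True)      # leaves two heads if diverged
    if len(repo.heads()) > 1:
        # replay the local commits on top of the new svn parent
        wrappers.rebase(hgrebase.rebase, u, repo, svn=True)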
hg.update(repo, repo['tip'].node()) - push_cmd.push_revisions_to_subversion(ui.ui(), repo=self.repo, - hg_repo_path=self.wc_path, - svn_url='svn://localhost/') + oldauthor = repo['tip'].user() + wrappers.push(None, ui.ui(), repo=self.repo) tip = self.repo['tip'] + self.assertNotEqual(oldauthor, tip.user()) self.assertNotEqual(tip.node(), old_tip) self.assertEqual(tip.parents()[0].node(), expected_parent) self.assertEqual(tip['adding_file'].data(), 'foo') @@ -87,6 +86,32 @@ class PushTests(test_util.TestBase): self.repo_path, self.wc_path) + def test_cant_push_empty_ctx(self): + repo = self.repo + def file_callback(repo, memctx, path): + if path == 'adding_file': + return context.memfilectx(path=path, + data='foo', + islink=False, + isexec=False, + copied=False) + raise IOError() + ctx = context.memctx(repo, + (repo['default'].node(), node.nullid), + 'automated test', + [], + file_callback, + 'an_author', + '2008-10-07 20:59:48 -0500', + {'branch': 'default',}) + new_hash = repo.commitctx(ctx) + hg.update(repo, repo['tip'].node()) + old_tip = repo['tip'].node() + self.pushrevisions() + tip = self.repo['tip'] + self.assertEqual(tip.node(), old_tip) + + def test_push_to_default(self, commit=True): repo = self.repo old_tip = repo['tip'].node() @@ -147,10 +172,7 @@ class PushTests(test_util.TestBase): newhash = self.repo.commitctx(ctx) repo = self.repo hg.update(repo, newhash) - push_cmd.push_revisions_to_subversion(ui.ui(), - repo=repo, - svn_url=test_util.fileurl(self.repo_path), - hg_repo_path=self.wc_path) + wrappers.push(None, ui.ui(), repo=repo) self.assertEqual(self.repo['tip'].parents()[0].parents()[0].node(), oldtiphash) self.assertEqual(self.repo['tip'].files(), ['delta', ]) self.assertEqual(self.repo['tip'].manifest().keys(), @@ -263,11 +285,11 @@ class PushTests(test_util.TestBase): '2008-10-29 21:26:00 -0500', {'branch': 'default', }) new_hash = repo.commitctx(ctx) - hg.update(repo, repo['tip'].node()) + hg.clean(repo, repo['tip'].node()) self.pushrevisions() tip = self.repo['tip'] self.assertNotEqual(tip.node(), new_hash) - self.assert_('@' in tip.user()) + self.assert_('@' in self.repo['tip'].user()) self.assertEqual(tip['gamma'].flags(), 'x') self.assertEqual(tip['gamma'].data(), 'foo') self.assertEqual([x for x in tip.manifest().keys() if 'x' not in @@ -383,6 +405,23 @@ class PushTests(test_util.TestBase): self.assertEqual(tip.parents()[0]['alpha'].flags(), expected_flags) self.assertEqual(tip['alpha'].flags(), '') + def test_push_outdated_base_text(self): + self.test_push_two_revs() + changes = [('adding_file', 'adding_file', 'different_content', ), + ] + self.commitchanges(changes, parent='tip') + self.pushrevisions() + changes = [('adding_file', 'adding_file', + 'even_more different_content', ), + ] + self.commitchanges(changes, parent=3) + try: + self.pushrevisions() + assert False, 'This should have aborted!' 
+ except hgutil.Abort, e: + self.assertEqual(e.args[0], + 'Base text was out of date, maybe rebase?') + def suite(): test_classes = [PushTests, PushOverSvnserveTests] diff --git a/tests/test_push_dirs.py b/tests/test_push_dirs.py --- a/tests/test_push_dirs.py +++ b/tests/test_push_dirs.py @@ -22,9 +22,9 @@ class TestPushDirectories(test_util.Test ] self.commitchanges(changes) self.pushrevisions() - self.assertEqual(self.svnls('trunk'), - ['d1', 'd1/a', 'd2', 'd2/a', 'd2/b', 'd31', - 'd31/d32', 'd31/d32/a', 'd31/d32/d33', + self.assertEqual(self.svnls('trunk'), + ['d1', 'd1/a', 'd2', 'd2/a', 'd2/b', 'd31', + 'd31/d32', 'd31/d32/a', 'd31/d32/d33', 'd31/d32/d33/d34', 'd31/d32/d33/d34/a']) # Add one revision with changed files only, no directory addition @@ -46,9 +46,50 @@ class TestPushDirectories(test_util.Test ] self.commitchanges(changes) self.pushrevisions() - self.assertEqual(self.svnls('trunk'), + self.assertEqual(self.svnls('trunk'), ['d2', 'd2/b', 'd31', 'd31/d32', 'd31/d32/a', 'd31/d32/d33']) + +class TestPushDirsNotAtRoot(test_util.TestBase): + def test_push_new_dir_project_root_not_repo_root(self): + test_util.load_fixture_and_fetch('fetch_missing_files_subdir.svndump', + self.repo_path, + self.wc_path, + subdir='foo') + changes = [('magic_new/a', 'magic_new/a', 'ohai', ), + ] + self.commitchanges(changes) + self.pushrevisions() + self.assertEqual(self.svnls('foo/trunk'), ['bar', + 'bar/alpha', + 'bar/beta', + 'bar/delta', + 'bar/gamma', + 'foo', + 'magic_new', + 'magic_new/a']) + + def test_push_new_file_existing_dir_root_not_repo_root(self): + test_util.load_fixture_and_fetch('empty_dir_in_trunk_not_repo_root.svndump', + self.repo_path, + self.wc_path, + subdir='project') + changes = [('narf/a', 'narf/a', 'ohai', ), + ] + self.commitchanges(changes) + self.assertEqual(self.svnls('project/trunk'), ['a', + 'narf',]) + self.pushrevisions() + self.assertEqual(self.svnls('project/trunk'), ['a', + 'narf', + 'narf/a']) + changes = [('narf/a', None, None, ), + ] + self.commitchanges(changes) + self.pushrevisions() + self.assertEqual(self.svnls('project/trunk'), ['a' ,]) + + def suite(): all = [unittest.TestLoader().loadTestsFromTestCase(TestPushDirectories), ] diff --git a/tests/test_rebuildmeta.py b/tests/test_rebuildmeta.py --- a/tests/test_rebuildmeta.py +++ b/tests/test_rebuildmeta.py @@ -5,10 +5,11 @@ import unittest from mercurial import hg from mercurial import ui -import test_util -import rebuildmeta +import svncommands import hg_delta_editor +import test_util + def _do_case(self, name, stupid): subdir = test_util.subdir.get(name, '') self._load_fixture_and_fetch(name, subdir=subdir, stupid=stupid) @@ -16,7 +17,7 @@ def _do_case(self, name, stupid): wc2_path = self.wc_path + '_clone' u = ui.ui() src, dest = hg.clone(u, self.wc_path, wc2_path, update=False) - rebuildmeta.rebuildmeta(u, + svncommands.rebuildmeta(u, dest, os.path.dirname(dest.path), args=[test_util.fileurl(self.repo_path + diff --git a/tests/test_urls.py b/tests/test_urls.py new file mode 100644 --- /dev/null +++ b/tests/test_urls.py @@ -0,0 +1,25 @@ +import test_util +import unittest +from svnwrap.svn_swig_wrapper import parse_url + +class TestSubversionUrls(test_util.TestBase): + def test_standard_url(self): + self.assertEqual((None, None, 'file:///var/svn/repo'), + parse_url('file:///var/svn/repo')) + + def test_user_url(self): + self.assertEqual(('joe', None, 'https://svn.testurl.com/repo'), + parse_url('https://joe@svn.testurl.com/repo')) + + def test_password_url(self): + self.assertEqual((None, 't3stpw', 
'svn+ssh://svn.testurl.com/repo'), + parse_url('svn+ssh://:t3stpw@svn.testurl.com/repo')) + + def test_user_password_url(self): + self.assertEqual(('joe', 't3stpw', 'https://svn.testurl.com/repo'), + parse_url('https://joe:t3stpw@svn.testurl.com/repo')) + + +def suite(): + all = [unittest.TestLoader().loadTestsFromTestCase(TestSubversionUrls)] + return unittest.TestSuite(all) diff --git a/tests/test_util.py b/tests/test_util.py --- a/tests/test_util.py +++ b/tests/test_util.py @@ -13,12 +13,12 @@ from mercurial import hg from mercurial import node from mercurial import ui -import fetch_command -import push_cmd +import wrappers # Fixtures that need to be pulled at a subdirectory of the repo path subdir = {'truncatedhistory.svndump': '/project2', 'fetch_missing_files_subdir.svndump': '/foo', + 'empty_dir_in_trunk_not_repo_root.svndump': '/project', } FIXTURES = os.path.join(os.path.abspath(os.path.dirname(__file__)), @@ -45,14 +45,12 @@ def load_svndump_fixture(path, fixture_n proc.stdin.flush() proc.communicate() -def load_fixture_and_fetch(fixture_name, repo_path, wc_path, stupid=False, subdir=''): +def load_fixture_and_fetch(fixture_name, repo_path, wc_path, stupid=False, subdir='', noupdate=True): load_svndump_fixture(repo_path, fixture_name) if subdir: repo_path += '/' + subdir - fetch_command.fetch_revisions(ui.ui(), - svn_url=fileurl(repo_path), - hg_repo_path=wc_path, - stupid=stupid) + wrappers.clone(None, ui.ui(), source=fileurl(repo_path), + dest=wc_path, stupid=stupid, noupdate=noupdate) repo = hg.repository(ui.ui(), wc_path) return repo @@ -75,9 +73,9 @@ def rmtree(path): class MockUI(object): real_ui = ui.ui _isatty = False - def __init__(self, parentui=None): + def __init__(self, src=None): self.stream = StringIO.StringIO() - self.inner_ui = self.real_ui(parentui=parentui) + self.inner_ui = self.real_ui(src) def status(self, *args): self.stream.write(''.join(args)) @@ -88,6 +86,9 @@ class MockUI(object): def write(self, *args): self.stream.write(*args) + def copy(self): + return self.__class__(self.inner_ui) + def __getattr__(self, attr): return getattr(self.inner_ui, attr) @@ -97,6 +98,10 @@ class TestBase(unittest.TestCase): self.oldwd = os.getcwd() self.tmpdir = tempfile.mkdtemp( 'svnwrap_test', dir=os.environ.get('HGSUBVERSION_TEST_TEMP', None)) + self.hgrc = os.path.join(self.tmpdir, '.hgrc') + os.environ['HGRCPATH'] = self.hgrc + rc = open(self.hgrc, 'w') + rc.write('[extensions]\nhgsubversion=') self.repo_path = '%s/testrepo' % self.tmpdir self.wc_path = '%s/testrepo_wc' % self.tmpdir @@ -120,9 +125,7 @@ class TestBase(unittest.TestCase): def pushrevisions(self, stupid=False): before = len(self.repo) - push_cmd.push_revisions_to_subversion( - ui.ui(), repo=self.repo, hg_repo_path=self.wc_path, - svn_url=fileurl(self.repo_path), stupid=stupid) + wrappers.push(None, ui.ui(), repo=self.repo, stupid=stupid) after = len(self.repo) self.assertEqual(0, after - before) @@ -140,7 +143,7 @@ class TestBase(unittest.TestCase): entries.sort() return entries - def commitchanges(self, changes): + def commitchanges(self, changes, parent='tip'): """Commit changes to mercurial directory 'changes' is a sequence of tuples (source, dest, data). It can look @@ -153,7 +156,7 @@ class TestBase(unittest.TestCase): - (source, None, None) to remove source. 
""" repo = self.repo - parentctx = repo['tip'] + parentctx = repo[parent] changed, removed = [], [] for source, dest, newdata in changes: @@ -187,7 +190,7 @@ class TestBase(unittest.TestCase): '2008-10-07 20:59:48 -0500') nodeid = repo.commitctx(ctx) repo = self.repo - hg.update(repo, nodeid) + hg.clean(repo, nodeid) return nodeid def assertchanges(self, changes, ctx): diff --git a/tests/test_utility_commands.py b/tests/test_utility_commands.py --- a/tests/test_utility_commands.py +++ b/tests/test_utility_commands.py @@ -1,6 +1,7 @@ import os import unittest +from hgext import rebase from mercurial import ui from mercurial import hg from mercurial import revlog @@ -8,8 +9,8 @@ from mercurial import context from mercurial import node import utility_commands -import fetch_command import test_util +import wrappers expected_info_output = '''URL: %(repourl)s/%(branch)s Repository Root: %(repourl)s @@ -26,7 +27,7 @@ class UtilityTests(test_util.TestBase): self._load_fixture_and_fetch('two_heads.svndump') hg.update(self.repo, 'the_branch') u = ui.ui() - utility_commands.run_svn_info(u, self.repo, self.wc_path) + utility_commands.info(u, self.repo, self.wc_path) expected = (expected_info_output % {'date': '2008-10-08 01:39:05 +0000 (Wed, 08 Oct 2008)', 'repourl': test_util.fileurl(self.repo_path), @@ -36,7 +37,7 @@ class UtilityTests(test_util.TestBase): self.assertEqual(u.stream.getvalue(), expected) hg.update(self.repo, 'default') u = ui.ui() - utility_commands.run_svn_info(u, self.repo, self.wc_path) + utility_commands.info(u, self.repo, self.wc_path) expected = (expected_info_output % {'date': '2008-10-08 01:39:29 +0000 (Wed, 08 Oct 2008)', 'repourl': test_util.fileurl(self.repo_path), @@ -65,18 +66,34 @@ class UtilityTests(test_util.TestBase): {'branch': 'localbranch', }) new = self.repo.commitctx(ctx) hg.update(self.repo, new) - utility_commands.print_parent_revision(u, self.repo, self.wc_path) - self.assert_(node.hex(self.repo['the_branch'].node())[:8] in - u.stream.getvalue()) - self.assert_('the_branch' in u.stream.getvalue()) - self.assert_('r5' in u.stream.getvalue()) + wrappers.parent(lambda x, y: None, u, self.repo, svn=True) + self.assertEqual(u.stream.getvalue(), + 'changeset: 3:4e256962fc5d\n' + 'branch: the_branch\n' + 'user: durin@df2126f7-00ab-4d49-b42c-7e981dde0bcf\n' + 'date: Wed Oct 08 01:39:05 2008 +0000\n' + 'summary: add delta on the branch\n\n') + hg.update(self.repo, 'default') + # Make sure styles work u = ui.ui() - utility_commands.print_parent_revision(u, self.repo, self.wc_path) - self.assert_(node.hex(self.repo['default'].node())[:8] in - u.stream.getvalue()) - self.assert_('trunk' in u.stream.getvalue()) - self.assert_('r6' in u.stream.getvalue()) + wrappers.parent(lambda x, y: None, u, self.repo, svn=True, style='compact') + self.assertEqual(u.stream.getvalue(), + '4:1 1083037b18d8 2008-10-08 01:39 +0000 durin\n' + ' Add gamma on trunk.\n\n') + # custom templates too + u = ui.ui() + wrappers.parent(lambda x, y: None, u, self.repo, svn=True, template='{node}\n') + self.assertEqual(u.stream.getvalue(), '1083037b18d85cd84fa211c5adbaeff0fea2cd9f\n') + + u = ui.ui() + wrappers.parent(lambda x, y: None, u, self.repo, svn=True) + self.assertEqual(u.stream.getvalue(), + 'changeset: 4:1083037b18d8\n' + 'parent: 1:c95251e0dd04\n' + 'user: durin@df2126f7-00ab-4d49-b42c-7e981dde0bcf\n' + 'date: Wed Oct 08 01:39:29 2008 +0000\n' + 'summary: Add gamma on trunk.\n\n') def test_outgoing_output(self): self._load_fixture_and_fetch('two_heads.svndump') @@ -98,20 +115,27 @@ class 
UtilityTests(test_util.TestBase): {'branch': 'localbranch', }) new = self.repo.commitctx(ctx) hg.update(self.repo, new) - utility_commands.show_outgoing_to_svn(u, self.repo, self.wc_path) + wrappers.outgoing(lambda x,y,z: None, u, self.repo, svn=True) self.assert_(node.hex(self.repo['localbranch'].node())[:8] in u.stream.getvalue()) - self.assert_('testy' in u.stream.getvalue()) + self.assertEqual(u.stream.getvalue(), ('changeset: 5:6de15430fa20\n' + 'branch: localbranch\n' + 'tag: tip\n' + 'parent: 3:4e256962fc5d\n' + 'user: testy\n' + 'date: Sun Dec 21 16:32:00 2008 -0500\n' + 'summary: automated test\n' + '\n')) hg.update(self.repo, 'default') u = ui.ui() - utility_commands.show_outgoing_to_svn(u, self.repo, self.wc_path) - self.assertEqual(u.stream.getvalue(), 'No outgoing changes found.\n') + wrappers.outgoing(lambda x,y,z: None, u, self.repo, svn=True) + self.assertEqual(u.stream.getvalue(), 'no changes found\n') def test_url_output(self): self._load_fixture_and_fetch('two_revs.svndump') hg.update(self.repo, 'tip') u = ui.ui() - utility_commands.print_wc_url(u, self.repo, self.wc_path) + utility_commands.url(u, self.repo, self.wc_path) expected = test_util.fileurl(self.repo_path) + '\n' self.assertEqual(u.stream.getvalue(), expected) @@ -136,7 +160,7 @@ class UtilityTests(test_util.TestBase): self.assertEqual(self.repo['tip'].branch(), 'localbranch') beforerebasehash = self.repo['tip'].node() hg.update(self.repo, 'tip') - utility_commands.rebase_commits(ui.ui(), self.repo, os.path.dirname(self.repo.path)) + wrappers.rebase(rebase.rebase, ui.ui(), self.repo, svn=True) self.assertEqual(self.repo['tip'].branch(), 'localbranch') self.assertEqual(self.repo['tip'].parents()[0].parents()[0], self.repo[0]) self.assertNotEqual(beforerebasehash, self.repo['tip'].node()) @@ -145,13 +169,12 @@ class UtilityTests(test_util.TestBase): """Verify url gets normalized on initial clone. """ test_util.load_svndump_fixture(self.repo_path, 'two_revs.svndump') - fetch_command.fetch_revisions(ui.ui(), - svn_url=test_util.fileurl(self.repo_path)+'/', - hg_repo_path=self.wc_path, - stupid=False) + wrappers.clone(None, ui.ui(), + source=test_util.fileurl(self.repo_path) + '/', + dest=self.wc_path, stupid=False) hg.update(self.repo, 'tip') u = ui.ui() - utility_commands.print_wc_url(u, self.repo, self.wc_path) + utility_commands.url(u, self.repo, self.wc_path) expected = test_util.fileurl(self.repo_path) + '\n' self.assertEqual(u.stream.getvalue(), expected) @@ -159,16 +182,34 @@ class UtilityTests(test_util.TestBase): """Verify url gets normalized on initial clone. 
""" test_util.load_svndump_fixture(self.repo_path, 'ignores.svndump') - fetch_command.fetch_revisions(ui.ui(), - svn_url=test_util.fileurl(self.repo_path)+'/', - hg_repo_path=self.wc_path, - stupid=False) + wrappers.clone(None, ui.ui(), + source=test_util.fileurl(self.repo_path) + '/', + dest=self.wc_path, stupid=False) hg.update(self.repo, 'tip') u = ui.ui() - utility_commands.generate_ignore(u, self.repo, self.wc_path) + utility_commands.genignore(u, self.repo, self.wc_path) self.assertEqual(open(os.path.join(self.wc_path, '.hgignore')).read(), '.hgignore\nsyntax:glob\nblah\notherblah\nbaz/magic\n') + def test_list_authors(self): + test_util.load_svndump_fixture(self.repo_path, + 'replace_trunk_with_branch.svndump') + u = ui.ui() + utility_commands.listauthors(u, + args=[test_util.fileurl(self.repo_path)], + authors=None) + self.assertEqual(u.stream.getvalue(), 'Augie\nevil\n') + + + def test_list_authors_map(self): + test_util.load_svndump_fixture(self.repo_path, + 'replace_trunk_with_branch.svndump') + author_path = os.path.join(self.repo_path, 'authors') + utility_commands.listauthors(ui.ui(), + args=[test_util.fileurl(self.repo_path)], + authors=author_path) + self.assertEqual(open(author_path).read(), 'Augie=\nevil=\n') + def suite(): all = [unittest.TestLoader().loadTestsFromTestCase(UtilityTests), diff --git a/tools/bisect-find-bad.sh b/tools/bisect-find-bad.sh --- a/tools/bisect-find-bad.sh +++ b/tools/bisect-find-bad.sh @@ -1,10 +1,4 @@ #!/bin/bash -/bin/rm -rf * -svn export `hg svn info 2> /dev/null | grep '^URL: ' | sed 's/URL: //'` -`hg svn parent | sed 's/.*: //;s/ .*//'` . --force -if [ `hg st | wc -l` = 0 ] ; then - exit 0 -else - hg revert --all - hg purge - exit 1 -fi +. $(dirname $0)/common.sh +verify_current_revision $1 +exit $? diff --git a/tools/common.sh b/tools/common.sh new file mode 100644 --- /dev/null +++ b/tools/common.sh @@ -0,0 +1,26 @@ +function verify_current_revision() +{ + /bin/rm -rf * + exportcmd="svn export `hg svn info 2> /dev/null | grep '^URL: ' | sed 's/URL: //'` -r`hg svn info | grep ^Revision | sed 's/.*: //;s/ .*//'` . --force" + `echo $exportcmd` > /dev/null + x=$? + if [[ "$x" != "0" ]] ; then + echo $exportcmd + echo 'export failed!' + return 255 + fi + if [[ "`hg st | wc -l | python -c 'import sys; print sys.stdin.read().strip()'`" == "0" ]] ; then + return 0 + else + if [[ $1 != "keep" ]] ; then + revert_all_files + fi + return 1 + fi +} + +function revert_all_files() +{ + hg revert --all + hg purge +} diff --git a/tools/converttags.sh b/tools/converttags.sh new file mode 100644 --- /dev/null +++ b/tools/converttags.sh @@ -0,0 +1,9 @@ +#!/bin/bash +# This shell script exists to convert hgsubversion tags to real hg tags. +# This will go away once hgsubversion's tags handling uses .hgtags directly. +hg tags | sed -E 's/([a-zA-Z0-9./_-]*) [^:]*:([a-f0-9]*)/\2 \1/' | grep -v ' tip$' > .hgtags +cat .hgtags | sed "$( +for x in `cat .hgtags| cut -f 1 -d ' '` ;do + echo -n "s/$x/" ; hg log --template '{node}' -r $x ; echo -n '/g; ' +done)" > .hgtags.new +mv .hgtags.new .hgtags diff --git a/tools/verify-all-heads.sh b/tools/verify-all-heads.sh --- a/tools/verify-all-heads.sh +++ b/tools/verify-all-heads.sh @@ -1,7 +1,9 @@ #!/bin/sh -for b in `hg branches | cut -f 1 -d ' '` ; do +. $(dirname $0)/common.sh + +for b in `hg branches -a | cut -f 1 -d ' ' | grep -v closed-branches` ; do hg co $b || break echo Verifying $b - $(dirname $0)/bisect-find-bad.sh > /dev/null || break + verify_current_revision keep > /dev/null || break echo $b Verified. 
done diff --git a/util.py b/util.py --- a/util.py +++ b/util.py @@ -3,18 +3,15 @@ import shutil from mercurial import hg from mercurial import node +from mercurial import util as hgutil -svn_subcommands = { } -def register_subcommand(name): - def inner(fn): - svn_subcommands[name] = fn - return fn - return inner -svn_commands_nourl = set() -def command_needs_no_url(fn): - svn_commands_nourl.add(fn) - return fn +def getuserpass(opts): + # DO NOT default the user to hg's getuser(). If you provide + # *any* default username to Subversion, it won't use any remembered + # username for the desired realm, breaking OS X Keychain support, + # GNOME keyring support, and all similar tools. + return opts.get('username', None), opts.get('password', '') def version(ui): @@ -26,30 +23,15 @@ def version(ui): return node.hex(ver)[:12] -def generate_help(): - ret = ['hg svn ...', '', - 'subcommands for Subversion integration', '', - 'list of subcommands:', ''] +def normalize_url(svnurl): + if svnurl.startswith('svn+http'): + svnurl = svnurl[4:] + url, revs, checkout = hg.parseurl(svnurl) + url = url.rstrip('/') + if checkout: + url = '%s#%s' % (url, checkout) + return url - for name, func in sorted(svn_subcommands.items()): - short_description = (func.__doc__ or '').splitlines()[0] - ret.append(" %-10s %s" % (name, short_description)) - - return "\n".join(ret) + '\n' - - -def normalize_url(svn_url): - return svn_url.rstrip('/') - - -def wipe_all_files(hg_wc_path): - files = [f for f in os.listdir(hg_wc_path) if f != '.hg'] - for f in files: - f = os.path.join(hg_wc_path, f) - if os.path.isdir(f): - shutil.rmtree(f) - else: - os.remove(f) REVMAP_FILE_VERSION = 1 def parse_revmap(revmap_filename): @@ -94,13 +76,13 @@ def outgoing_revisions(ui, repo, hg_edit and sourcerev.node() != node.nullid): outgoing_rev_hashes.append(sourcerev.node()) sourcerev = sourcerev.parents() - assert len(sourcerev) == 1 + if len(sourcerev) != 1: + raise hgutil.Abort("Sorry, can't find svn parent of a merge revision.") sourcerev = sourcerev[0] if sourcerev.node() != node.nullid: return outgoing_rev_hashes def build_extra(revnum, branch, uuid, subdir): - # TODO this needs to be fixed with the new revmap extra = {} branchpath = 'trunk' if branch: @@ -133,3 +115,12 @@ def describe_revision(ui, r): def describe_commit(ui, h, b): ui.note(' committed to "%s" as %s\n' % ((b or 'default'), node.short(h))) + + +def swap_out_encoding(new_encoding="UTF-8"): + """ Utility for mercurial incompatibility changes, can be removed after 1.3 + """ + from mercurial import encoding + old = encoding.encoding + encoding.encoding = new_encoding + return old diff --git a/utility_commands.py b/utility_commands.py --- a/utility_commands.py +++ b/utility_commands.py @@ -1,47 +1,33 @@ import os -import mercurial -from mercurial import cmdutil -from mercurial import node -from mercurial import util as mutil -from hgext import rebase +from mercurial import util as hgutil import svnwrap +import cmdutil import util import hg_delta_editor -def print_wc_url(ui, repo, hg_repo_path, **opts): +def url(ui, repo, hg_repo_path, **opts): """show the location (URL) of the Subversion repository """ hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui) ui.status(hge.url, '\n') -print_wc_url = util.register_subcommand('url')(print_wc_url) -def find_wc_parent_rev(ui, repo, hge, svn_commit_hashes): - """Find the svn parent revision of the repo's dirstate. 
- """ - workingctx = repo.parents()[0] - o_r = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, workingctx.node()) - if o_r: - workingctx = repo[o_r[-1]].parents()[0] - return workingctx - - -def generate_ignore(ui, repo, hg_repo_path, force=False, **opts): +def genignore(ui, repo, hg_repo_path, force=False, **opts): """generate .hgignore from svn:ignore properties. """ ignpath = os.path.join(hg_repo_path, '.hgignore') if not force and os.path.exists(ignpath): - raise mutil.Abort('not overwriting existing .hgignore, try --force?') + raise hgutil.Abort('not overwriting existing .hgignore, try --force?') ignorefile = open(ignpath, 'w') ignorefile.write('.hgignore\nsyntax:glob\n') hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui) svn_commit_hashes = dict(zip(hge.revmap.itervalues(), hge.revmap.iterkeys())) - parent = find_wc_parent_rev(ui, repo, hge, svn_commit_hashes) + parent = cmdutil.parentrev(ui, repo, hge, svn_commit_hashes) r, br = svn_commit_hashes[parent.node()] if br == None: branchpath = 'trunk' @@ -50,7 +36,8 @@ def generate_ignore(ui, repo, hg_repo_pa url = hge.url if url[-1] == '/': url = url[:-1] - svn = svnwrap.SubversionRepo(url) + user, passwd = util.getuserpass(opts) + svn = svnwrap.SubversionRepo(url, user, passwd) dirs = [''] + [d[0] for d in svn.list_files(branchpath, r) if d[1] == 'd'] for dir in dirs: props = svn.list_props('%s/%s/' % (branchpath,dir), r) @@ -61,18 +48,21 @@ def generate_ignore(ui, repo, hg_repo_pa ignorefile.write('%s/%s\n' % (dir, prop)) else: ignorefile.write('%s\n' % prop) -generate_ignore = util.register_subcommand('genignore')(generate_ignore) -def run_svn_info(ui, repo, hg_repo_path, **opts): +def info(ui, repo, hg_repo_path, **opts): """show Subversion details similar to `svn info' """ hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui) svn_commit_hashes = dict(zip(hge.revmap.itervalues(), hge.revmap.iterkeys())) - parent = find_wc_parent_rev(ui, repo, hge, svn_commit_hashes) - r, br = svn_commit_hashes[parent.node()] + parent = cmdutil.parentrev(ui, repo, hge, svn_commit_hashes) + pn = parent.node() + if pn not in svn_commit_hashes: + ui.status('Not a child of an svn revision.\n') + return 0 + r, br = svn_commit_hashes[pn] subdir = parent.extra()['convert_revision'][40:].split('@')[0] if br == None: branchpath = '/trunk' @@ -102,102 +92,42 @@ Last Changed Date: %(date)s\n''' % 'author': author, 'revision': r, # TODO I'd like to format this to the user's local TZ if possible - 'date': mutil.datestr(parent.date(), - '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)') + 'date': hgutil.datestr(parent.date(), + '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)') }) -run_svn_info = util.register_subcommand('info')(run_svn_info) -def print_parent_revision(ui, repo, hg_repo_path, **opts): - """show Mercurial & Subversion parents of the working dir or revision +def listauthors(ui, args, authors=None, **opts): + """list all authors in a Subversion repository """ - hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, - ui_=ui) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - ha = find_wc_parent_rev(ui, repo, hge, svn_commit_hashes) - if ha.node() != node.nullid: - r, br = svn_commit_hashes[ha.node()] - ui.status('Working copy parent revision is %s: r%s on %s\n' % - (ha, r, br or 'trunk')) + if not len(args): + ui.status('No repository specified.\n') + return + svn = svnwrap.SubversionRepo(util.normalize_url(args[0])) + author_set = set() + for rev in svn.revisions(): + author_set.add(str(rev.author)) # So None 
becomes 'None' + if authors: + authorfile = open(authors, 'w') + authorfile.write('%s=\n' % '=\n'.join(sorted(author_set))) + authorfile.close() else: - ui.status('Working copy seems to have no parent svn revision.\n') - return 0 -print_parent_revision = util.register_subcommand('parent')(print_parent_revision) - - -def rebase_commits(ui, repo, hg_repo_path, extrafn=None, sourcerev=None, **opts): - """rebase current unpushed revisions onto the Subversion head - - This moves a line of development from making its own head to the top of - Subversion development, linearizing the changes. In order to make sure you - rebase on top of the current top of Subversion work, you should probably run - 'hg svn pull' before running this. - """ - if extrafn is None: - def extrafn2(ctx, extra): - """defined here so we can add things easily. - """ - extra['branch'] = ctx.branch() - extrafn = extrafn2 - if sourcerev is None: - sourcerev = repo.parents()[0].node() - hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, - ui_=ui) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - o_r = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, sourcerev=sourcerev) - if not o_r: - ui.status('Nothing to rebase!\n') - return 0 - if len(repo[sourcerev].children()): - ui.status('Refusing to rebase non-head commit like a coward\n') - return 0 - parent_rev = repo[o_r[-1]].parents()[0] - target_rev = parent_rev - p_n = parent_rev.node() - exhausted_choices = False - while target_rev.children() and not exhausted_choices: - for c in target_rev.children(): - exhausted_choices = True - n = c.node() - if (n in svn_commit_hashes and - svn_commit_hashes[n][1] == svn_commit_hashes[p_n][1]): - target_rev = c - exhausted_choices = False - break - if parent_rev == target_rev: - ui.status('Already up to date!\n') - return 0 - # TODO this is really hacky, there must be a more direct way - return rebase.rebase(ui, repo, dest=node.hex(target_rev.node()), - base=node.hex(sourcerev), - extrafn=extrafn) -rebase_commits = util.register_subcommand('rebase')(rebase_commits) - - -def show_outgoing_to_svn(ui, repo, hg_repo_path, **opts): - """show changesets not found in the Subversion repository - """ - hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, - ui_=ui) - svn_commit_hashes = dict(zip(hge.revmap.itervalues(), - hge.revmap.iterkeys())) - o_r = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, repo.parents()[0].node()) - if not (o_r and len(o_r)): - ui.status('No outgoing changes found.\n') - return 0 - displayer = cmdutil.show_changeset(ui, repo, opts, buffered=False) - for node in reversed(o_r): - displayer.show(repo[node]) -show_outgoing_to_svn = util.register_subcommand('outgoing')(show_outgoing_to_svn) + ui.status('%s\n' % '\n'.join(sorted(author_set))) def version(ui, **opts): """Show current version of hg and hgsubversion. 
""" - ui.status('hg: %s\n' % mutil.version()) + ui.status('hg: %s\n' % hgutil.version()) ui.status('svn bindings: %s\n' % svnwrap.version()) ui.status('hgsubversion: %s\n' % util.version(ui)) -version = util.register_subcommand('version')(version) -version = util.command_needs_no_url(version) + + +nourl = ['version', 'listauthors'] +table = { + 'url': url, + 'genignore': genignore, + 'info': info, + 'listauthors': listauthors, + 'version': version, +} diff --git a/wrappers.py b/wrappers.py new file mode 100644 --- /dev/null +++ b/wrappers.py @@ -0,0 +1,401 @@ +import os + +from hgext import rebase as hgrebase + +from mercurial import cmdutil as hgcmdutil +from mercurial import commands +from mercurial import patch +from mercurial import hg +from mercurial import util as hgutil +from mercurial import node +from mercurial import i18n + +from svn import core +from svn import delta + +import cmdutil +import hg_delta_editor +import stupid as stupidmod +import svnwrap +import util + +def parent(orig, ui, repo, *args, **opts): + """show Mercurial & Subversion parents of the working dir or revision + """ + if not opts.get('svn', False): + return orig(ui, repo, *args, **opts) + hge = hg_delta_editor.HgChangeReceiver(repo=repo) + svn_commit_hashes = dict(zip(hge.revmap.itervalues(), + hge.revmap.iterkeys())) + ha = cmdutil.parentrev(ui, repo, hge, svn_commit_hashes) + if ha.node() == node.nullid: + raise hgutil.Abort('No parent svn revision!') + displayer = hgcmdutil.show_changeset(ui, repo, opts, buffered=False) + displayer.show(ha) + return 0 + + +def outgoing(orig, ui, repo, dest=None, *args, **opts): + """show changesets not found in the Subversion repository + """ + svnurl = repo.ui.expandpath(dest or 'default-push', dest or 'default') + if not (cmdutil.issvnurl(svnurl) or opts.get('svn', False)): + return orig(ui, repo, dest, *args, **opts) + + # split off #rev; TODO implement --revision/#rev support + svnurl, revs, checkout = hg.parseurl(svnurl, opts.get('rev')) + hge = hg_delta_editor.HgChangeReceiver(repo=repo) + svn_commit_hashes = dict(zip(hge.revmap.itervalues(), + hge.revmap.iterkeys())) + o_r = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, + repo.parents()[0].node()) + if not (o_r and len(o_r)): + ui.status('no changes found\n') + return 0 + displayer = hgcmdutil.show_changeset(ui, repo, opts, buffered=False) + for node in reversed(o_r): + displayer.show(repo[node]) + + +def diff(orig, ui, repo, *args, **opts): + """show a diff of the most recent revision against its parent from svn + """ + if not opts.get('svn', False) or opts.get('change', None): + return orig(ui, repo, *args, **opts) + svn_commit_hashes = {} + hge = hg_delta_editor.HgChangeReceiver(repo=repo) + svn_commit_hashes = dict(zip(hge.revmap.itervalues(), + hge.revmap.iterkeys())) + if not opts.get('rev', None): + parent = repo.parents()[0] + o_r = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, + parent.node()) + if o_r: + parent = repo[o_r[-1]].parents()[0] + opts['rev'] = ['%s:.' 
% node.hex(parent.node()), ] + node1, node2 = hgcmdutil.revpair(repo, opts['rev']) + baserev, _junk = svn_commit_hashes.get(node1, (-1, 'junk', )) + newrev, _junk = svn_commit_hashes.get(node2, (-1, 'junk', )) + it = patch.diff(repo, node1, node2, + opts=patch.diffopts(ui, opts={'git': True, + 'show_function': False, + 'ignore_all_space': False, + 'ignore_space_change': False, + 'ignore_blank_lines': False, + 'unified': True, + 'text': False, + })) + ui.write(cmdutil.filterdiff(''.join(it), baserev, newrev)) + + +def push(orig, ui, repo, dest=None, *args, **opts): + """push revisions starting at a specified head back to Subversion. + """ + opts.pop('svn', None) # unused in this case + svnurl = repo.ui.expandpath(dest or 'default-push', dest or 'default') + if not cmdutil.issvnurl(svnurl): + return orig(ui, repo, dest=dest, *args, **opts) + old_encoding = util.swap_out_encoding() + hge = hg_delta_editor.HgChangeReceiver(repo=repo) + svnurl = util.normalize_url(svnurl) + # split off #rev; TODO: implement --rev/#rev support + svnurl, revs, checkout = hg.parseurl(svnurl, opts.get('rev')) + if svnurl != hge.url: + raise hgutil.Abort('wrong subversion url!') + svn_commit_hashes = dict(zip(hge.revmap.itervalues(), + hge.revmap.iterkeys())) + user, passwd = util.getuserpass(opts) + # Strategy: + # 1. Find all outgoing commits from this head + if len(repo.parents()) != 1: + ui.status('Cowardly refusing to push branch merge\n') + return 1 + workingrev = repo.parents()[0] + ui.status('searching for changes\n') + outgoing = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, workingrev.node()) + if not (outgoing and len(outgoing)): + ui.status('no changes found\n') + return 0 + while outgoing: + oldest = outgoing.pop(-1) + old_ctx = repo[oldest] + if len(old_ctx.parents()) != 1: + ui.status('Found a branch merge, this needs discussion and ' + 'implementation.\n') + return 1 + base_n = old_ctx.parents()[0].node() + old_children = repo[base_n].children() + svnbranch = repo[base_n].branch() + oldtip = base_n + samebranchchildren = [c for c in repo[oldtip].children() if c.branch() == svnbranch + and c.node() in svn_commit_hashes] + while samebranchchildren: + oldtip = samebranchchildren[0].node() + samebranchchildren = [c for c in repo[oldtip].children() if c.branch() == svnbranch + and c.node() in svn_commit_hashes] + # 2. Commit oldest revision that needs to be pushed + base_revision = svn_commit_hashes[base_n][0] + try: + cmdutil.commit_from_rev(ui, repo, old_ctx, hge, svnurl, + base_revision, user, passwd) + except cmdutil.NoFilesException: + ui.warn("Could not push revision %s because it had no changes in svn.\n" % + old_ctx) + return 1 + # 3. Fetch revisions from svn + # TODO this probably should pass in the source explicitly + r = pull(None, ui, repo, svn=True, stupid=opts.get('svn_stupid', False), + username=user, password=passwd) + assert not r or r == 0 + # 4. Find the new head of the target branch + repo = hg.repository(ui, hge.path) + oldtipctx = repo[oldtip] + replacement = [c for c in oldtipctx.children() if c not in old_children + and c.branch() == oldtipctx.branch()] + assert len(replacement) == 1, 'Replacement node came back as: %r' % replacement + replacement = replacement[0] + # 5.
Rebase all children of the currently-pushing rev to the new branch + heads = repo.heads(old_ctx.node()) + for needs_transplant in heads: + def extrafn(ctx, extra): + if ctx.node() == oldest: + return + extra['branch'] = ctx.branch() + rebase(hgrebase.rebase, ui, repo, svn=True, svnextrafn=extrafn, + svnsourcerev=needs_transplant, **opts) + repo = hg.repository(ui, hge.path) + for child in repo[replacement.node()].children(): + rebasesrc = node.bin(child.extra().get('rebase_source', node.hex(node.nullid))) + if rebasesrc in outgoing: + while rebasesrc in outgoing: + rebsrcindex = outgoing.index(rebasesrc) + outgoing = (outgoing[0:rebsrcindex] + + [child.node(), ] + outgoing[rebsrcindex+1:]) + children = [c for c in child.children() if c.branch() == child.branch()] + if children: + child = children[0] + rebasesrc = node.bin(child.extra().get('rebase_source', node.hex(node.nullid))) + hge = hg_delta_editor.HgChangeReceiver(hge.path, ui_=ui) + svn_commit_hashes = dict(zip(hge.revmap.itervalues(), hge.revmap.iterkeys())) + util.swap_out_encoding(old_encoding) + return 0 + + +def clone(orig, ui, source, dest=None, *args, **opts): + '''clone Subversion repository to a local Mercurial repository. + + If no destination directory name is specified, it defaults to the + basename of the source plus "-hg". + + You can specify multiple paths for the location of tags using comma + separated values. + ''' + svnurl = ui.expandpath(source) + if not cmdutil.issvnurl(svnurl): + return orig(ui, source=source, dest=dest, *args, **opts) + + if not dest: + dest = hg.defaultdest(hg.parseurl(source)[0]) + '-hg' + ui.status("Assuming destination %s\n" % dest) + + if os.path.exists(dest): + raise hgutil.Abort("destination '%s' already exists" % dest) + url = util.normalize_url(svnurl) + res = -1 + try: + try: + res = pull(None, ui, None, source=url, svn=None, + svn_stupid=opts.pop('svn_stupid', False), + create_new_dest=dest, **opts) + except core.SubversionException, e: + if e.apr_err == core.SVN_ERR_RA_SERF_SSL_CERT_UNTRUSTED: + raise hgutil.Abort('It appears svn does not trust the ssl cert for this site.\n' + 'Please try running svn ls on that url first.') + raise + finally: + if os.path.exists(dest): + repo = hg.repository(ui, dest) + fp = repo.opener("hgrc", "w", text=True) + fp.write("[paths]\n") + # percent needs to be escaped for ConfigParser + fp.write("default = %(url)s\nsvn = %(url)s\n" % {'url': svnurl}) + fp.close() + if (res is None or res == 0) and not opts.get('noupdate', False): + # Split off #rev + url, revs, checkout = hg.parseurl(svnurl) + for test in (checkout, 'default', 'tip'): + try: + uprev = repo.lookup(test) + break + except: + continue + commands.update(ui, repo, uprev) + + return res + + +def pull(orig, ui, repo, source="default", *args, **opts): + """pull new revisions from Subversion + + Also takes svn, svn_stupid, and create_new_dest kwargs. 
+ """ + svn = opts.pop('svn', None) + svn_stupid = opts.pop('svn_stupid', False) + create_new_dest = opts.pop('create_new_dest', False) + url = ((repo and repo.ui) or ui).expandpath(source) + if not (cmdutil.issvnurl(url) or svn or create_new_dest): + return orig(ui, repo, source=source, *args, **opts) + svn_url = url + svn_url = util.normalize_url(svn_url) + # Split off #rev; TODO: implement --rev/#rev support limiting the pulled/cloned revisions + svn_url, revs, checkout = hg.parseurl(svn_url, opts.get('rev')) + old_encoding = util.swap_out_encoding() + # TODO implement skipto support + skipto_rev = 0 + have_replay = not svn_stupid + if have_replay and not callable( + delta.svn_txdelta_apply(None, None, None)[0]): #pragma: no cover + ui.status('You are using old Subversion SWIG bindings. Replay will not' + ' work until you upgrade to 1.5.0 or newer. Falling back to' + ' a slower method that may be buggier. Please upgrade, or' + ' contribute a patch to use the ctypes bindings instead' + ' of SWIG.\n') + have_replay = False + initializing_repo = False + user, passwd = util.getuserpass(opts) + svn = svnwrap.SubversionRepo(svn_url, user, passwd) + author_host = "@%s" % svn.uuid + tag_locations = ['tags', ] + authors = opts.pop('svn_authors', None) + filemap = opts.pop('svn_filemap', None) + if repo: + hg_editor = hg_delta_editor.HgChangeReceiver(repo=repo, + subdir=svn.subdir, + author_host=author_host, + tag_locations=tag_locations, + authors=authors, + filemap=filemap) + else: + hg_editor = hg_delta_editor.HgChangeReceiver(ui_=ui, + path=create_new_dest, + subdir=svn.subdir, + author_host=author_host, + tag_locations=tag_locations, + authors=authors, + filemap=filemap) + if os.path.exists(hg_editor.uuid_file): + uuid = open(hg_editor.uuid_file).read() + assert uuid == svn.uuid + start = hg_editor.last_known_revision() + else: + open(hg_editor.uuid_file, 'w').write(svn.uuid) + open(hg_editor.svn_url_file, 'w').write(svn_url) + initializing_repo = True + start = skipto_rev + + if initializing_repo and start > 0: + raise hgutil.Abort('Revision skipping at repository initialization ' + 'remains unimplemented.') + + revisions = 0 + if not initializing_repo: + oldheads = len(repo.changelog.heads()) + + # start converting revisions + for r in svn.revisions(start=start): + valid = True + hg_editor.update_branch_tag_map_for_rev(r) + for p in r.paths: + if hg_editor._is_path_valid(p): + valid = True + break + if valid: + # got a 502? Try more than once! 
+ tries = 0 + converted = False + while not converted: + try: + util.describe_revision(ui, r) + if have_replay: + try: + cmdutil.replay_convert_rev(hg_editor, svn, r) + except svnwrap.SubversionRepoCanNotReplay, e: #pragma: no cover + ui.status('%s\n' % e.message) + stupidmod.print_your_svn_is_old_message(ui) + have_replay = False + stupidmod.svn_server_pull_rev(ui, svn, hg_editor, r) + else: + stupidmod.svn_server_pull_rev(ui, svn, hg_editor, r) + converted = True + except core.SubversionException, e: #pragma: no cover + if (e.apr_err == core.SVN_ERR_RA_DAV_REQUEST_FAILED + and '502' in str(e) + and tries < 3): + tries += 1 + ui.status('Got a 502, retrying (%s)\n' % tries) + else: + raise hgutil.Abort(*e.args) + revisions += 1 + util.swap_out_encoding(old_encoding) + + if revisions == 0: + ui.status(i18n._("no changes found\n")) + return + else: + ui.status("added %d svn revisions\n" % revisions) + if not initializing_repo: + newheads = len(repo.changelog.heads()) + # postincoming needs to know if heads were added or removed + # calculation based on mercurial.localrepo.addchangegroup + # 0 means no changes, 1 no new heads, > 1 new heads, < 0 heads removed + modheads = newheads - oldheads + (newheads < oldheads and -1 or 1) + commands.postincoming(ui, repo, modheads, opts.get('update'), checkout) + + +def rebase(orig, ui, repo, **opts): + """rebase current unpushed revisions onto the Subversion head + + This moves a line of development from making its own head to the top of + Subversion development, linearizing the changes. In order to make sure you + rebase on top of the current top of Subversion work, you should probably run + 'hg svn pull' before running this. + + Also looks for svnextrafn and svnsourcerev in **opts. + """ + if not opts.get('svn', False): + return orig(ui, repo, **opts) + def extrafn2(ctx, extra): + """defined here so we can add things easily. + """ + extra['branch'] = ctx.branch() + extrafn = opts.get('svnextrafn', extrafn2) + sourcerev = opts.get('svnsourcerev', repo.parents()[0].node()) + hge = hg_delta_editor.HgChangeReceiver(repo=repo) + svn_commit_hashes = dict(zip(hge.revmap.itervalues(), + hge.revmap.iterkeys())) + o_r = util.outgoing_revisions(ui, repo, hge, svn_commit_hashes, sourcerev=sourcerev) + if not o_r: + ui.status('Nothing to rebase!\n') + return 0 + if len(repo[sourcerev].children()): + ui.status('Refusing to rebase non-head commit like a coward\n') + return 0 + parent_rev = repo[o_r[-1]].parents()[0] + target_rev = parent_rev + p_n = parent_rev.node() + exhausted_choices = False + while target_rev.children() and not exhausted_choices: + for c in target_rev.children(): + exhausted_choices = True + n = c.node() + if (n in svn_commit_hashes and + svn_commit_hashes[n][1] == svn_commit_hashes[p_n][1]): + target_rev = c + exhausted_choices = False + break + if parent_rev == target_rev: + ui.status('Already up to date!\n') + return 0 + return orig(ui, repo, dest=node.hex(target_rev.node()), + base=node.hex(sourcerev), + extrafn=extrafn)
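
A minimal standalone sketch of the URL normalization that util.py's new normalize_url performs (drop the disambiguating 'svn+' prefix from svn+http URLs, strip a trailing slash, and keep any '#checkout' fragment). This is illustration only, not part of the patch: the function name and example URLs are invented, and the fragment handling that the real code delegates to hg.parseurl is approximated here with a plain string split so the snippet stays self-contained.

    def normalize_url_sketch(svnurl):
        # 'svn+http://...' and 'svn+https://...' -> plain http(s) URL
        if svnurl.startswith('svn+http'):
            svnurl = svnurl[4:]
        # split off an optional '#checkout' fragment (the real code uses hg.parseurl)
        url, _, checkout = svnurl.partition('#')
        url = url.rstrip('/')
        if checkout:
            url = '%s#%s' % (url, checkout)
        return url

    assert normalize_url_sketch('svn+http://example.com/svn/') == 'http://example.com/svn'
    assert normalize_url_sketch('https://example.com/svn/#branch') == 'https://example.com/svn#branch'
    assert normalize_url_sketch('file:///tmp/repo') == 'file:///tmp/repo'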