comparison svnwrap/svn_swig_wrapper.py @ 300:4aba7542f6a9

Various cleanups, cosmetics and removal of superfluous assertions.
author Dan Villiom Podlaski Christiansen <danchr@gmail.com>
date Fri, 27 Mar 2009 03:16:21 +0100
parents 32d3f1716e66
children 79440ed81011
--- svnwrap/svn_swig_wrapper.py (299:3e27514d575c)
+++ svnwrap/svn_swig_wrapper.py (300:4aba7542f6a9)
@@ -3,15 +3,19 @@
 import os
 import shutil
 import sys
 import tempfile
 import hashlib
+import collections
+import gc
 
 from svn import client
 from svn import core
 from svn import delta
 from svn import ra
+
+from mercurial import util as hgutil
 
 def version():
     return '%d.%d.%d' % (core.SVN_VER_MAJOR, core.SVN_VER_MINOR, core.SVN_VER_MICRO)
 
 if (core.SVN_VER_MAJOR, core.SVN_VER_MINOR, core.SVN_VER_MICRO) < (1, 5, 0): #pragma: no cover
@@ -41,12 +45,12 @@
 
     @staticmethod
     def get_client_string(pool):
         return 'hgsubversion'
 
-
 def user_pass_prompt(realm, default_username, ms, pool): #pragma: no cover
+    # FIXME: should use getpass() and username() from mercurial.ui
     creds = core.svn_auth_cred_simple_t()
     creds.may_save = ms
     if default_username:
         sys.stderr.write('Auth realm: %s\n' % (realm,))
         creds.username = default_username
@@ -144,10 +148,12 @@
     def __init__(self, url='', username=''):
         self.svn_url = url
         self.uname = username
         self.auth_baton_pool = core.Pool()
         self.auth_baton = _create_auth_baton(self.auth_baton_pool)
+        # self.init_ra_and_client() assumes that a pool already exists
+        self.pool = core.Pool()
 
         self.init_ra_and_client()
         self.uuid = ra.get_uuid(self.ra, self.pool)
         repo_root = ra.get_repos_root(self.ra, self.pool)
         # *will* have a leading '/', would not if we used get_repos_root2
@@ -158,11 +164,29 @@
 
     def init_ra_and_client(self):
         """Initializes the RA and client layers, because sometimes getting
         unified diffs runs the remote server out of open files.
         """
-        # while we're in here we'll recreate our pool
+        # Debugging code; retained for possible later use.
+        if False:
+            gc.collect()
+            import pympler.muppy.tracker
+            try:
+                self.memory_tracker
+                try:
+                    self.memory_tracker.print_diff(self.memory_base)
+                except:
+                    print 'HOP'
+                self.memory_base = self.memory_tracker.create_summary()
+            except:
+                print 'HEP'
+                self.memory_tracker = pympler.muppy.tracker.SummaryTracker()
+
+        # while we're in here we'll recreate our pool, but first, we clear it
+        # and destroy it to make possible leaks cause fatal errors.
+        self.pool.clear()
+        self.pool.destroy()
         self.pool = core.Pool()
         self.client_context = client.create_context()
 
         self.client_context.auth_baton = self.auth_baton
         self.client_context.config = svn_config
@@ -183,10 +207,13 @@
         return 0
     START = property(START)
 
     def branches(self):
         """Get the branches defined in this repo assuming a standard layout.
+
+        This method should be eliminated; this class does not have
+        sufficient knowledge to yield all known tags.
         """
         branches = self.list_dir('branches').keys()
         branch_info = {}
         head=self.HEAD
         for b in branches:
@@ -205,15 +232,26 @@
 
     def tags(self):
         """Get the current tags in this repo assuming a standard layout.
 
         This returns a dictionary of tag: (source path, source rev)
+
+        This method should be eliminated; this class does not have
+        sufficient knowledge to yield all known tags.
         """
         return self.tags_at_rev(self.HEAD)
     tags = property(tags)
 
     def tags_at_rev(self, revision):
+        """Get the tags in this repo at the given revision, assuming a
+        standard layout.
+
+        This returns a dictionary of tag: (source path, source rev)
+
+        This method should be eliminated; this class does not have
+        sufficient knowledge to yield all known tags.
+        """
         try:
             tags = self.list_dir('tags', revision=revision).keys()
         except core.SubversionException, e:
             if e.apr_err == core.SVN_ERR_FS_NOT_FOUND:
                 return {}
275 """ 313 """
276 # NB: you'd think this would work, but you'd be wrong. I'm pretty 314 # NB: you'd think this would work, but you'd be wrong. I'm pretty
277 # convinced there must be some kind of svn bug here. 315 # convinced there must be some kind of svn bug here.
278 #return self.fetch_history_at_paths(['tags', 'trunk', 'branches'], 316 #return self.fetch_history_at_paths(['tags', 'trunk', 'branches'],
279 # start=start) 317 # start=start)
280 # this does the same thing, but at the repo root + filtering. It's 318 # However, we no longer need such filtering, as we gracefully handle
281 # kind of tough cookies, sadly. 319 # branches located at arbitrary locations.
282 for r in self.fetch_history_at_paths([''], start=start, 320 return self.fetch_history_at_paths([''], start=start, stop=stop,
283 chunk_size=chunk_size): 321 chunk_size=chunk_size)
284 should_yield = False
285 i = 0
286 paths = list(r.paths.keys())
287 while i < len(paths) and not should_yield:
288 p = paths[i]
289 if (p.startswith('trunk') or p.startswith('tags')
290 or p.startswith('branches')):
291 should_yield = True
292 i += 1
293 if should_yield:
294 yield r
295
296 322
297 def fetch_history_at_paths(self, paths, start=None, stop=None, 323 def fetch_history_at_paths(self, paths, start=None, stop=None,
298 chunk_size=1000): 324 chunk_size=_chunk_size):
299 revisions = [] 325 '''TODO: This method should be merged with self.revisions() as
300 def callback(paths, revnum, author, date, message, pool): 326 they are now functionally equivalent.'''
301 r = Revision(revnum, author, message, date, paths,
302 strip_path=self.subdir)
303 revisions.append(r)
304 if not start: 327 if not start:
305 start = self.START 328 start = self.START
306 if not stop: 329 if not stop:
307 stop = self.HEAD 330 stop = self.HEAD
308 while stop > start: 331 while stop > start:
309 ra.get_log(self.ra, 332 def callback(paths, revnum, author, date, message, pool):
310 paths, 333 r = Revision(revnum, author, message, date, paths,
311 start+1, 334 strip_path=self.subdir)
312 stop, 335 revisions.append(r)
313 chunk_size, #limit of how many log messages to load 336 # use a queue; we only access revisions in a FIFO manner
314 True, # don't need to know changed paths 337 revisions = collections.deque()
315 True, # stop on copies 338
316 callback, 339 try:
317 self.pool) 340 # TODO: using min(start + chunk_size, stop) may be preferable;
318 if len(revisions) < chunk_size: 341 # ra.get_log(), even with chunk_size set, takes a while
319 # this means there was no history for the path, so force the 342 # when converting the 65k+ rev. in LLVM.
320 # loop to exit 343 ra.get_log(self.ra,
321 start = stop 344 paths,
345 start+1,
346 stop,
347 chunk_size, #limit of how many log messages to load
348 True, # don't need to know changed paths
349 True, # stop on copies
350 callback,
351 self.pool)
352 except core.SubversionException, e:
353 if e.apr_err not in [core.SVN_ERR_FS_NOT_FOUND]:
354 raise
355 else:
356 raise hgutil.Abort('%s not found at revision %d!'
357 % (self.subdir.rstrip('/'), stop))
358
359 while len(revisions) > 1:
360 yield revisions.popleft()
361 # Now is a good time to do a quick garbage collection.
362 gc.collect(0)
363
364 if len(revisions) == 0:
365 # exit the loop; there is no history for the path.
366 break
322 else: 367 else:
323 start = revisions[-1].revnum 368 r = revisions.popleft()
324 while len(revisions) > 0: 369 start = r.revnum
325 yield revisions[0] 370 yield r
326 revisions.pop(0) 371 self.init_ra_and_client()
372 # Now is a good time to do a thorough garbage colection.
373 gc.collect()
327 374
328 def commit(self, paths, message, file_data, base_revision, addeddirs, 375 def commit(self, paths, message, file_data, base_revision, addeddirs,
329 deleteddirs, properties, copies): 376 deleteddirs, properties, copies):
330 """Commits the appropriate targets from revision in editor's store. 377 """Commits the appropriate targets from revision in editor's store.
331 """ 378 """
@@ -340,10 +387,11 @@
                                                    False,
                                                    self.pool)
         checksum = []
         # internal dir batons can fall out of scope and get GCed before svn is
         # done with them. This prevents that (credit to gvn for the idea).
+        # TODO: verify that these are not the cause of our leaks
         batons = [edit_baton, ]
         def driver_cb(parent, path, pool):
             if not parent:
                 bat = editor.open_root(edit_baton, base_revision, self.pool)
                 batons.append(bat)
@@ -372,11 +420,11 @@
                     frompath, fromrev = copies.get(path, (None, -1))
                     if frompath:
                         frompath = self.svn_url + '/' + frompath
                     baton = editor.add_file(path, parent, frompath, fromrev, pool)
                 except (core.SubversionException, TypeError), e: #pragma: no cover
-                    print e.message
+                    print e
                     raise
             elif action == 'delete':
                 baton = editor.delete_entry(path, base_revision, parent, pool)
                 compute_delta = False
 
@@ -402,18 +450,15 @@
         delta.path_driver(editor, edit_baton, base_revision, paths, driver_cb,
                           self.pool)
         editor.close_edit(edit_baton, self.pool)
 
     def get_replay(self, revision, editor, oldest_rev_i_have=0):
-        # this method has a tendency to chew through RAM if you don't re-init
-        self.init_ra_and_client()
         e_ptr, e_baton = delta.make_editor(editor)
         try:
             ra.replay(self.ra, revision, oldest_rev_i_have, True, e_ptr,
                       e_baton, self.pool)
         except core.SubversionException, e: #pragma: no cover
-            # can I depend on this number being constant?
             if (e.apr_err == core.SVN_ERR_RA_NOT_IMPLEMENTED or
                 e.apr_err == core.SVN_ERR_UNSUPPORTED_FEATURE):
                 raise SubversionRepoCanNotReplay, ('This Subversion server '
                     'is older than 1.4.0, and cannot satisfy replay requests.')
             else:
@@ -423,13 +468,10 @@
                          deleted=True, ignore_type=False):
         """Gets a unidiff of path at revision against revision-1.
         """
         if not self.hasdiff3:
             raise SubversionRepoCanNotDiff()
-        # works around an svn server keeping too many open files (observed
-        # in an svnserve from the 1.2 era)
-        self.init_ra_and_client()
 
         assert path[0] != '/'
         url = self.svn_url + '/' + path
         url2 = url
         if other_path is not None:
489 mode = ("svn:special" in info) and 'l' or mode 531 mode = ("svn:special" in info) and 'l' or mode
490 except core.SubversionException, e: 532 except core.SubversionException, e:
491 notfound = (core.SVN_ERR_FS_NOT_FOUND, 533 notfound = (core.SVN_ERR_FS_NOT_FOUND,
492 core.SVN_ERR_RA_DAV_PATH_NOT_FOUND) 534 core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
493 if e.apr_err in notfound: # File not found 535 if e.apr_err in notfound: # File not found
494 raise IOError() 536 raise IOError, e.args[0]
495 raise 537 raise
496 if mode == 'l': 538 if mode == 'l':
497 linkprefix = "link " 539 linkprefix = "link "
498 if data.startswith(linkprefix): 540 if data.startswith(linkprefix):
499 data = data[len(linkprefix):] 541 data = data[len(linkprefix):]
@@ -531,15 +573,15 @@
         'dirpath' and 'kind' is 'f' if the entry is a file, 'd' if it is a
         directory. Raise IOError if the directory cannot be found at given
         revision.
         """
         dirpath = dirpath.strip('/')
-        pool = core.Pool()
         rpath = '/'.join([self.svn_url, dirpath]).strip('/')
         rev = optrev(revision)
         try:
-            entries = client.ls(rpath, rev, True, self.client_context, pool)
+            entries = client.ls(rpath, rev, True, self.client_context,
+                                self.pool)
         except core.SubversionException, e:
             if e.apr_err == core.SVN_ERR_FS_NOT_FOUND:
                 raise IOError('%s cannot be found at r%d' % (dirpath, revision))
             raise
         for path, e in entries.iteritems():