
cvs2svn: optimized tag creation #2

From: Marko Macek <Marko.Macek_at_gmx.net>
Date: 2003-02-26 22:25:02 CET

Hi!

Here is a new patch; it fixes a few issues detected so far. It is not ready
to be merged yet, but it is good enough for testing.

The next thing I am going to do is split up the ".commits" file
generation for merging.
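
(For reference: the ".commits" file is an intermediate file that the new
pass5 below writes and that pass6/pass7 re-read one Commit record at a time.
A minimal sketch of that consumer loop, mirroring the loops in the patch; the
handler name "process" is hypothetical and stands in for update_state() or
commit():)

  def replay_commits(commits_path, process):
    # Commit.read() fills in one record and returns 0, or returns 1 at EOF.
    commits_input = open(commits_path, 'r')
    while 1:
      c = Commit()
      if c.read(commits_input) == 1:
        break
      process(c)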

Please comment,

Changes:
 - It currently assumes that revision 1.1.1.1 is the same as 1.1 (i.e. that
   cvs import was only used at initial repository creation); otherwise it is
   hard to convert any branches, because cvs puts tags on 1.1.1.1 when a file
   has not been modified. This needs to be made optional. (See the sketch
   after this list.)
 - Support for vendor branches has been disabled (due to the above).
 - Nested branches are not tested yet.
 - Timestamps are not handled correctly yet.
 - Some passes share data and cannot be restarted.
 - Commits are not yet broken up when their files are tagged differently.
 - Handling of tags on subsets is not optimal (it reverts to a file-by-file
   copy even when a subtree copy would be possible).
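
A minimal sketch of the 1.1.1.1 assumption from the first item above
(illustrative only, not code from the patch; the helper name normalize_rev is
made up):

  def normalize_rev(revision):
    # CVS gives unmodified imported files the vendor-branch revision 1.1.1.1
    # and often attaches tags to it; the patch treats such a revision as
    # identical to the initial trunk revision 1.1.
    if revision == "1.1.1.1":
      return "1.1"
    return revision

  # e.g. a tag recorded against 1.1.1.1 ends up attached to 1.1:
  assert normalize_rev("1.1.1.1") == "1.1"
  assert normalize_rev("1.2") == "1.2"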

Regards,
Mark

Index: cvs2svn.py
===================================================================
--- cvs2svn.py (revision 5121)
+++ cvs2svn.py (working copy)
@@ -28,6 +28,7 @@
 SORTED_REVS_SUFFIX = '.s-revs'
 TAGS_SUFFIX = '.tags'
 RESYNC_SUFFIX = '.resync'
+COMMITS_SUFFIX = '.commits'
 
 SVNROOT = 'svnroot'
 ATTIC = os.sep + 'Attic'
@@ -41,6 +42,16 @@
 
 verbose = 1
 
+copy_from = { None: [ None, 0 ] }
+first_branch = None
+copies_done = { }
+is_branch = { }
+tag_fileset = { }
+repo_state = { None: { } }
+warnings = [ ]
+is_optimal_copy = { }
+subset_copy_nr = { }
+tag_subset = { }
 
 class CollectData(rcsparse.Sink):
   def __init__(self, cvsroot, log_fname_base):
@@ -78,9 +89,11 @@
     last_dot = revision.rfind(".")
     branch_rev = revision[:last_dot]
     last2_dot = branch_rev.rfind(".")
-    branch_rev = branch_rev[:last2_dot] + revision[last_dot:]
+    base_rev = branch_rev[:last2_dot]
+    branch_rev = base_rev + revision[last_dot:]
     self.set_branch_name(branch_rev, branch_name)
-    self.add_branch_point(branch_rev[:last2_dot], branch_name)
+    if base_rev == "1.1.1.1": base_rev = "1.1"
+    self.add_branch_point(base_rev, branch_name)
 
   def get_tags(self, revision):
     if self.taglist.has_key(revision):
@@ -95,18 +108,24 @@
       return []
 
   def define_tag(self, name, revision):
+    if name == "arelease": return
     self.tags.write('%s %s %s\n' % (name, revision, self.fname))
     if branch_tag.match(revision):
       self.add_cvs_branch(revision, name)
     elif vendor_tag.match(revision):
-      self.set_branch_name(revision, name)
+#      self.set_branch_name(revision, name)
+      pass
     else:
+      if revision == "1.1.1.1": revision = "1.1"
       if not self.taglist.has_key(revision):
         self.taglist[revision] = []
       self.taglist[revision].append(name)
 
   def define_revision(self, revision, timestamp, author, state,
                       branches, next):
+
+    if revision == "1.1.1.1": return
+
     ### what else?
     if state == 'dead':
       op = OP_DELETE
@@ -117,12 +136,20 @@
     self.rev_data[revision] = [int(timestamp), author, op, None]
 
     # record the previous revision for sanity checking later
-    if trunk_rev.match(revision):
-      self.prev[revision] = next
-    elif next:
-      self.prev[next] = revision
+
+    if next != "1.1.1.1":
+      if trunk_rev.match(revision):
+        self.prev[revision] = next
+      elif next and next != "1.1.1.1":
+        self.prev[next] = revision
+
+#    print "branches : ", branches
     for b in branches:
-      self.prev[b] = revision
+      if b != "1.1.1.1":
+        self.prev[b] = revision
+
+    if self.prev.has_key("1.1.1.1"):
+      print "defrev", revision, next, self.prev
 
   def tree_completed(self):
     "The revision tree has been parsed. Analyze it for consistency."
@@ -136,6 +163,9 @@
     # links that we have recorded and validate that the timestamp on the
     # previous revision is before the specified revision
 
+#    for a,b in self.prev.items():
+#      print "PREV: ", a, b
+
     # if we have to resync some nodes, then we restart the scan. just keep
     # looping as long as we need to restart.
     while 1:
@@ -171,6 +201,7 @@
         return
 
   def set_revision_info(self, revision, log, text):
+    if revision == "1.1.1.1": return
     timestamp, author, op, old_ts = self.rev_data[revision]
     digest = sha.new(log + '\0' + author).hexdigest()
     if old_ts:
@@ -331,6 +362,133 @@
           else:
             raise RuntimeError, 'Error parsing diff commands'
 
+def add_tag_filerev(br, f, r, to):
+  if not tag_fileset.has_key(to): tag_fileset[to] = { }
+  if not tag_fileset[to].has_key(f): tag_fileset[to][f] = { }
+  tag_fileset[to][f] = r
+
+def is_tag_complete(br):
+  if copies_done.has_key(br):
+    return
+  source = copy_from[br][0]
+#  print "IS_COMPLETE", br, source, tag_fileset[br]
+#  if repo_state.has_key(source): print "REPO_STATE: ", repo_state[source]
+  if not repo_state.has_key(source):
+    return 0
+  if tag_fileset[br] == repo_state[source]:
+    return 1
+  else:
+    return 0
+
+def is_tag_subset(br, subset):
+  if copies_done.has_key(br):
+    return
+  source = copy_from[br][0]
+#  print "IS_COMPLETE", br, source, tag_fileset[br]
+#  if repo_state.has_key(source): print "REPO_STATE: ", repo_state[source]
+  if not repo_state.has_key(source):
+    return 0
+  if subset == repo_state[source]:
+    return 1
+  else:
+    return 0
+
+def tag_subset_nr(br):
+  source = copy_from[br][0]
+  count = 0
+
+  if repo_state.has_key(source):
+    for a in repo_state[source]:
+      if not tag_fileset[br].has_key(a):
+        return 0, None
+      if tag_fileset[br][a] == repo_state[source][a]:
+        count += 1
+  if count == 0:
+    return 0, None
+  new_state = { }
+  for a in repo_state[source]:
+    new_state[a] = repo_state[source][a]
+  return count, new_state
+
+def need_branch(ctx, t_fs, branch_name, reason):
+  copy_branch(ctx, t_fs, branch_name, reason)
+
+def check_addition(ctx, t_fs, branch_name, f):
+  for t in tag_fileset.keys():
+    if is_optimal_copy.has_key(t):
+      continue
+    if tag_subset.has_key(t):
+      continue
+    source = copy_from[t][0]
+    if source == branch_name: ### FIXME: CHECK IF ANCESTOR, not PARENT
+      if not tag_fileset[t].has_key(f): # and not repo_state[source].has_key(f):
+        print "ADDITION", branch_name, f, t
+        warnings.append("ADDITION " + str(branch_name) + " " + f + " " + t)
+        need_branch(ctx, t_fs, t, "add " + f)
+
+def check_change(ctx, t_fs, branch_name, f):
+  for t in tag_fileset.keys():
+    if is_optimal_copy.has_key(t):
+      continue
+#    if tag_subset.has_key(t):
+#      continue
+    source = copy_from[t][0]
+    if source == branch_name: ### FIXME: CHECK IF ANCESTOR, not PARENT
+      if tag_fileset[t].has_key(f):
+        if repo_state.has_key(source) and repo_state[source].has_key(f):
+          if tag_fileset[t][f] == repo_state[source][f]:
+            print "CHANGE", branch_name, f, t
+            warnings.append("CHANGE " + str(branch_name) + " " + f + " " + t)
+            need_branch(ctx, t_fs, t, "change " + f)
+
+def copy_branch(ctx, t_fs, branch_name, reason):
+  if branch_name == None:
+    return
+  if copies_done.has_key(branch_name):
+    return
+  need_branch(ctx, t_fs, copy_from[branch_name][0], "need parent: " + reason)
+
+  c_pool = util.svn_pool_create(ctx.pool)
+  source = copy_from[branch_name][0]
+  c_from = branch_path(ctx, source)[:-1]
+  if is_branch.has_key(branch_name):
+    c_to = branch_path(ctx, branch_name)[:-1]
+  else:
+    c_to = get_tag_path(ctx, branch_name)[:-1]
+
+  print "REPO: ", source, repo_state[source]
+  print "TAG: ", branch_name
+  if tag_fileset.has_key(branch_name):
+    print "FILES: ", tag_fileset[branch_name]
+
+  log_msg = "copied branch %s to %s %s" % (c_from, c_to, reason)
+  print log_msg
+
+  rev = fs.youngest_rev(t_fs, c_pool)
+  txn = fs.begin_txn(t_fs, rev, c_pool)
+  root = fs.txn_root(txn, c_pool)
+
+  t_root = fs.revision_root(t_fs, rev, c_pool)
+  make_path(fs, root, c_to, c_pool)
+  fs.copy(t_root, c_from, root, c_to, c_pool)
+
+  fs.change_txn_prop(txn, 'svn:author', "cvs2svn", c_pool)
+  fs.change_txn_prop(txn, 'svn:log', log_msg, c_pool)
+
+  conflicts, new_rev = fs.commit_txn(txn)
+  if conflicts != '\n':
+    print ' CONFLICTS:', `conflicts`
+  print ' new revision:', new_rev
+
+  util.svn_pool_destroy(c_pool)
+
+  copies_done[branch_name] = 1
+
+  files = { }
+  for f in repo_state[source]:
+    files[f] = repo_state[source][f]
+  repo_state[branch_name] = files
+
 class Commit:
   def __init__(self):
     self.files = { }
@@ -342,7 +500,7 @@
   def has_file(self, fname):
     return self.files.has_key(fname)
 
-  def add(self, t, op, file, rev, branch_name, tags, branches):
+  def add(self, t, id, op, file, rev, branch_name, tags, branches):
     # record the time range of this commit
     if t < self.t_min:
       self.t_min = t
@@ -350,25 +508,68 @@
       self.t_max = t
 
     if op == OP_CHANGE:
-      self.changes.append((file, rev, branch_name, tags, branches))
+      self.changes.append((t, id, op, file, rev, branch_name, tags, branches))
     else:
       # OP_DELETE
-      self.deletes.append((file, rev, branch_name, tags, branches))
+      self.deletes.append((t, id, op, file, rev, branch_name, tags, branches))
     self.files[file] = 1
 
+  def write(self, nr, output):
+    output.write("%d\n" % nr)
+    output.write("%d %s\n" % (len(self.changes), OP_CHANGE))
+    for change in self.changes:
+      t, id, op, file, rev, branch_name, tags, branches = change
+      write_revs_line(output, t, id, op, rev, file, branch_name, tags, branches)
+    output.write("%d %s\n" % (len(self.deletes), OP_DELETE))
+    for delete in self.deletes:
+      t, id, op, file, rev, branch_name, tags, branches = delete
+      write_revs_line(output, t, id, op, rev, file, branch_name, tags, branches)
+    pass
+
+  def read(self, f):
+    line = f.readline()
+    if line == '':
+      return 1
+    line = f.readline()
+    data = line.split(' ', 2)
+    if data[1] != OP_CHANGE + "\n":
+      raise "error"
+    nchanges = int(data[0])
+    while nchanges > 0:
+      line = f.readline()
+      print "CHANGE: ", line
+      timestamp, digest, op, rev, fname, branch_name, tags, branches = \
+                 parse_revs_line(line)
+      self.add(timestamp, digest, op, fname, rev, branch_name, tags, branches)
+      nchanges -= 1
+    line = f.readline()
+    data = line.split(' ', 2)
+    if data[1] != OP_DELETE + "\n":
+      raise "error"
+    ndeletes = int(data[0])
+    while ndeletes > 0:
+      line = f.readline()
+      print "DELETE: ", line
+      timestamp, digest, op, rev, fname, branch_name, tags, branches = \
+                 parse_revs_line(line)
+      self.add(timestamp, digest, op, fname, rev, branch_name, tags, branches)
+      ndeletes -= 1
+    return 0
+
   def get_metadata(self, pool):
     # by definition, the author and log message must be the same for all
     # items that went into this commit. therefore, just grab any item from
     # our record of changes/deletes.
     if self.changes:
-      file, rev, br, tags, branches = self.changes[0]
+      timestamp, digest, op, file, rev, br, tags, branches = self.changes[0]
     else:
       # there better be one...
-      file, rev, br, tags, branches = self.deletes[0]
+      timestamp, digest, op, file, rev, br, tags, branches = self.deletes[0]
 
     # now, fetch the author/log from the ,v file
     rip = RevInfoParser()
     rip.parse_cvs_file(file)
+ print "DEBUG get author: ", file, rev
     author = rip.authors[rev]
     log = rip.logs[rev]
 
@@ -378,6 +579,56 @@
 
     return author, log, date
 
+  def update_state(self, ctx):
+
+    for timestamp, digest, op, f, r, br, tags, branches in self.changes:
+      rel_name = relative_name(ctx.cvsroot, f[:-2])
+      repos_path = branch_path(ctx, br) + rel_name
+      print ' changing %s : %s' % (r, repos_path)
+      if not repo_state.has_key(br):
+        repo_state[br] = { }
+      repo_state[br][rel_name] = r
+      for br in tags + branches:
+        if not repo_state.has_key(br):
+          repo_state[br] = { }
+        repo_state[br][rel_name] = r
+
+    print "+" * 72
+    for timestamp, digest, op, f, r, br, tags, branches in self.deletes:
+      rel_name = relative_name(ctx.cvsroot, f[:-2])
+      repos_path = branch_path(ctx, br) + rel_name
+      print ' deleting %s : %s' % (r, repos_path)
+
+      if r == "1.1":
+        continue
+      # If the file was initially added on a branch, the first mainline
+      # revision will be marked dead, and thus, attempts to delete it will
+      # fail, since it doesn't really exist.
+      del repo_state[br][rel_name]
+
+    for to in tag_fileset.keys():
+      if is_tag_complete(to):
+        is_optimal_copy[to] = 1
+        if tag_subset.has_key(to):
+          del tag_subset[to]
+          del subset_copy_nr[to]
+        pass
+      else:
+        nr, state = tag_subset_nr(to)
+        if nr > 0:
+          if not subset_copy_nr.has_key(to) or nr > subset_copy_nr[to]:
+            subset_copy_nr[to] = nr
+            tag_subset[to] = state
+            for a in tag_subset[to]:
+              if not tag_fileset[to].has_key(a):
+                print "EXTRA FILE", a, to
+                raise "test"
+              pass
+            pass
+          pass
+        pass
+      pass
+
   def commit(self, t_fs, ctx):
     # commit this transaction
     print 'committing: %s, over %d seconds' % (time.ctime(self.t_min),
@@ -397,6 +648,46 @@
       print ' (skipped; dry run enabled)'
       return
 
+ print "=" * 72
+ do_copies = [ ]
+
+ def add_copy(do_copies, f, r, repos_path, to, flag, br):
+ if copies_done.has_key(to):
+ print "adding file to branch", to, f, r, br
+ need_branch(ctx, t_fs, to, "need branch for a copy")
+ if flag:
+ to_path = branch_path(ctx, to) + rel_name
+ else:
+ to_path = get_tag_path(ctx, to) + rel_name
+ do_copies.append((repos_path, to_path, flag, to, rel_name, r))
+
+ for timestamp, digest, op, f, r, br, tags, branches in self.changes:
+ rel_name = relative_name(ctx.cvsroot, f[:-2])
+ #repos_path = branch_path(ctx, br) + rel_name
+ check_change(ctx, t_fs, br, rel_name)
+ check_addition(ctx, t_fs, br, rel_name)
+ need_branch(ctx, t_fs, br, "change file on branch")
+ for timestamp, digest, op, f, r, br, tags, branches in self.deletes:
+ if r != "1.1":
+ rel_name = relative_name(ctx.cvsroot, f[:-2])
+ #repos_path = branch_path(ctx, br) + rel_name
+ check_change(ctx, t_fs, br, rel_name)
+ check_addition(ctx, t_fs, br, rel_name)
+
+ need_branch(ctx, t_fs, br, "change file on branch")
+
+ for timestamp, digest, op, f, r, br, tags, branches in self.changes:
+ print "change", f, r, br, tags, branches
+ # compute a repository path. ensure we have a leading "/" and drop
+ # the ,v from the file name
+ rel_name = relative_name(ctx.cvsroot, f[:-2])
+ print "REL_NAME", rel_name, br, tags, branches
+ repos_path = branch_path(ctx, br) + rel_name
+ for to_tag in tags:
+ add_copy(do_copies, rel_name, r, repos_path, to_tag, 0, br)
+ for to_branch in branches:
+ add_copy(do_copies, rel_name, r, repos_path, to_branch, 1, br)
+
     # create a pool for the entire commit
     c_pool = util.svn_pool_create(ctx.pool)
 
@@ -406,12 +697,10 @@
 
     lastcommit = (None, None)
 
-    do_copies = [ ]
-
     # create a pool for each file; it will be cleared on each iteration
     f_pool = util.svn_pool_create(c_pool)
 
-    for f, r, br, tags, branches in self.changes:
+    for timestamp, digest, op, f, r, br, tags, branches in self.changes:
       # compute a repository path. ensure we have a leading "/" and drop
       # the ,v from the file name
       rel_name = relative_name(ctx.cvsroot, f[:-2])
@@ -477,17 +766,14 @@
       # stream, and anything the FS may have done.
       util.svn_pool_clear(f_pool)
 
+      if not repo_state.has_key(br):
+        repo_state[br] = { }
+      repo_state[br][rel_name] = r
+
       # remember what we just did, for the next iteration
       lastcommit = (repos_path, r)
 
-      for to_tag in tags:
-        to_tag_path = get_tag_path(ctx, to_tag) + rel_name
-        do_copies.append((repos_path, to_tag_path, 1))
-      for to_branch in branches:
-        to_branch_path = branch_path(ctx, to_branch) + rel_name
-        do_copies.append((repos_path, to_branch_path, 2))
-
-    for f, r, br, tags, branches in self.deletes:
+    for timestamp, digest, op, f, r, br, tags, branches in self.deletes:
       # compute a repository path. ensure we have a leading "/" and drop
       # the ,v from the file name
       rel_name = relative_name(ctx.cvsroot, f[:-2])
@@ -501,6 +787,7 @@
       if r != '1.1':
         ### need to discriminate between OS paths and FS paths
         fs.delete(root, repos_path, f_pool)
+        del repo_state[br][rel_name]
 
       for to_branch in branches:
         to_branch_path = branch_path(ctx, to_branch) + rel_name
@@ -527,8 +814,8 @@
     fs.change_rev_prop(t_fs, new_rev, 'svn:date', date, c_pool)
 
     ### how come conflicts is a newline?
-    if conflicts != '\n':
-      print ' CONFLICTS:', `conflicts`
+#    if conflicts != '\n':
+#      print ' CONFLICTS:', `conflicts`
     print ' new revision:', new_rev
 
     if len(do_copies) > 0:
@@ -537,7 +824,7 @@
       txn = fs.begin_txn(t_fs, rev, c_pool)
       root = fs.txn_root(txn, c_pool)
 
-      for c_from, c_to, c_type in do_copies:
+      for c_from, c_to, c_type, br, fn, rn in do_copies:
         print "copying", c_from, "to", c_to
 
         t_root = fs.revision_root(t_fs, rev, f_pool)
@@ -546,6 +833,7 @@
 
         # clear the pool after each copy
         util.svn_pool_clear(f_pool)
+        repo_state[br][fn] = rn
 
       log_msg = "%d copies to tags/branches\n" % (len(do_copies))
       fs.change_txn_prop(txn, 'svn:author', "cvs2svn", c_pool)
@@ -558,6 +846,13 @@
 
       # FIXME: we don't set a date here
 
+    for to in tag_fileset.keys():
+      if is_tag_complete(to):
+        need_branch(ctx, t_fs, to, "perfect copy")
+      elif tag_subset.has_key(to):
+        if is_tag_subset(to, tag_subset[to]):
+          need_branch(ctx, t_fs, to, "subset copy")
+
     # done with the commit and file pools
     util.svn_pool_destroy(c_pool)
 
@@ -683,16 +978,43 @@
   os.system('sort %s > %s' % (ctx.log_fname_base + CLEAN_REVS_SUFFIX,
                               ctx.log_fname_base + SORTED_REVS_SUFFIX))
 
+# determine the branch hierarchy.
+# TODO: only handles tree hierarchies for now, no DAGs yet
 def pass4(ctx):
-  # create the target repository
-  if not ctx.dry_run:
-    if ctx.create_repos:
-      t_repos = repos.svn_repos_create(ctx.target, None, None, None, ctx.pool)
-    else:
-      t_repos = repos.svn_repos_open(ctx.target, ctx.pool)
-    t_fs = repos.svn_repos_fs(t_repos)
-  else:
-    t_fs = t_repos = None
+  global copy_from
+  global first_branch
+  global repo_state
+
+  for line in fileinput.FileInput(ctx.log_fname_base + SORTED_REVS_SUFFIX):
+    timestamp, id, op, rev, fname, branch_name, tags, branches = \
+               parse_revs_line(line)
+
+    for tag in tags + branches:
+      if not copy_from.has_key(branch_name):
+        copy_from[branch_name] = [ None, 0, 1 ]
+
+      level = copy_from[branch_name][1]
+      if not copy_from.has_key(tag) or copy_from[tag][1] <= level:
+        copy_from[tag] = (branch_name, level + 1, 0)
+
+    is_branch[branch_name] = 1
+
+    for br in branches:
+      is_branch[br] = 1
+
+    for tag in tags + branches:
+      add_tag_filerev(branch_name, relative_name(ctx.cvsroot, fname[:-2]), rev, tag)
+
+
+  print "IS_BRANCH", is_branch
+  print "FIRST_BRANCH: ", first_branch
+  for a in copy_from.keys():
+    print "COPY FROM: ", repr(a), " " * (30 - len(repr(a))), copy_from[a]
+  print "TAG_FILESET: ", tag_fileset
+
+def pass5(ctx):
+  "combine individual changes into commits (change sets)"
+  commit_output = open(ctx.log_fname_base + COMMITS_SUFFIX, 'w')
 
   # process the logfiles, creating the target
   commits = { }
@@ -719,15 +1041,15 @@
     # sort the commits into time-order, then commit 'em
     process.sort()
     for t_max, c in process:
-      c.commit(t_fs, ctx)
-    count = count + len(process)
+      c.write(count, commit_output)
+      count += 1
 
     # add this item into the set of commits we're assembling
     if commits.has_key(id):
       c = commits[id]
     else:
       c = commits[id] = Commit()
-    c.add(timestamp, op, fname, rev, branch_name, tags, branches)
+    c.add(timestamp, id, op, fname, rev, branch_name, tags, branches)
 
   # if there are any pending commits left, then flush them
   if commits:
@@ -736,17 +1058,93 @@
       process.append((c.t_max, c))
     process.sort()
     for t_max, c in process:
-      c.commit(t_fs, ctx)
-    count = count + len(process)
+      c.write(count, commit_output)
+      count += 1
 
   if ctx.verbose:
     print count, 'commits processed.'
 
+def pass6(ctx):
+  global repo_state
+  repo_state = { None: { } }
+
+  commits_input = open(ctx.log_fname_base + COMMITS_SUFFIX, 'r')
+  while 1:
+    c = Commit()
+    if c.read(commits_input) == 1:
+      break
+    c.update_state(ctx)
+
+  for to in tag_fileset.keys():
+    if is_optimal_copy.has_key(to):
+      print "PERFECT TAG COPY", to
+    elif tag_subset.has_key(to):
+      print "PARTIAL TAG COPY", to, " ", len(tag_subset[to].keys()), "/", len(tag_fileset[to].keys())
+      for a in tag_fileset[to]:
+        if not tag_subset[to].has_key(a):
+          print a, " ",
+        elif tag_subset[to][a] != tag_fileset[to][a]:
+          print " ", a, "* ", tag_subset[to][a], tag_fileset[to][a]
+      for a in tag_subset[to]:
+        if not tag_fileset[to].has_key(a):
+          print "bad file", a, to
+          raise "foo"
+    else:
+      print "FILE BY FILE TAG COPY", to
+      pass
+    pass
+
+def pass7(ctx):
+  "create the target repository"
+  global repo_state
+  repo_state = { None: { } }
+
+  if not ctx.dry_run:
+    if ctx.create_repos:
+      t_repos = repos.svn_repos_create(ctx.target, None, None, ctx.pool)
+    else:
+      t_repos = repos.svn_repos_open(ctx.target, ctx.pool)
+    t_fs = repos.svn_repos_fs(t_repos)
+  else:
+    t_fs = t_repos = None
+
+  c_pool = util.svn_pool_create(ctx.pool)
+  rev = fs.youngest_rev(t_fs, c_pool)
+  txn = fs.begin_txn(t_fs, rev, c_pool)
+  root = fs.txn_root(txn, c_pool)
+
+  t_root = fs.revision_root(t_fs, rev, c_pool)
+  make_path(fs, root, "/trunk/", c_pool)
+
+  fs.change_txn_prop(txn, 'svn:author', "cvs2svn", c_pool)
+  fs.change_txn_prop(txn, 'svn:log', "making /trunk", c_pool)
+
+  conflicts, new_rev = fs.commit_txn(txn)
+  if conflicts != '\n':
+    print ' CONFLICTS:', `conflicts`
+  print ' new revision:', new_rev
+
+  util.svn_pool_destroy(c_pool)
+
+  commits_input = open(ctx.log_fname_base + COMMITS_SUFFIX, 'r')
+  while 1:
+    c = Commit()
+    if c.read(commits_input) == 1:
+      break
+    c.commit(t_fs, ctx)
+
+  for to in copy_from.keys():
+    if not copies_done.has_key(to):
+      warnings.append("TAG NOT CONVERTED: " + str(to))
+
 _passes = [
   pass1,
   pass2,
   pass3,
   pass4,
+  pass5,
+  pass6,
+  pass7,
   ]
 
 class _ctx:
@@ -796,7 +1194,7 @@
   ctx.trunk_base = "/trunk"
   ctx.tags_base = "/tags"
   ctx.branches_base = "/branches"
-  ctx.encoding = "ascii"
+  ctx.encoding = "iso-8859-1"
 
   try:
     opts, args = getopt.getopt(sys.argv[1:], 'p:s:vn',
@@ -835,6 +1233,8 @@
       ctx.encoding = value
 
   util.run_app(convert, ctx, start_pass=start_pass)
+  for a in warnings:
+    print "WARNING: ", a
 
 if __name__ == '__main__':
   main()
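
For reviewers skimming the diff: the heart of the optimization is a
comparison between the file-to-revision map a tag needs (tag_fileset) and the
simulated state of its copy source (repo_state). A condensed, stand-alone
sketch of that decision with made-up sample data follows; the real logic
lives in is_tag_complete() and tag_subset_nr() above.

  def classify_tag(tag_files, source_state):
    # tag_files:    {path: revision} the tag must end up containing
    # source_state: {path: revision} currently present on the copy source
    if tag_files == source_state:
      return "perfect copy"      # one subtree copy reproduces the tag exactly
    for path in source_state:
      if path not in tag_files:
        return "file by file"    # a subtree copy would drag in unwanted files
    matches = 0
    for path in source_state:
      if tag_files[path] == source_state[path]:
        matches = matches + 1
    if matches > 0:
      return "subset copy"       # copy the subtree, then fix up the rest
    return "file by file"

  # Hypothetical example:
  #   classify_tag({"a.c": "1.2", "b.c": "1.1"}, {"a.c": "1.2", "b.c": "1.1"})
  #     -> "perfect copy"
  #   classify_tag({"a.c": "1.3", "b.c": "1.1"}, {"a.c": "1.2", "b.c": "1.1"})
  #     -> "subset copy"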
