diff --git a/contrib/client-side/incremental-update.py b/contrib/client-side/incremental-update.py
index 93ae1270471f1..c6a5d225adafc 100755
--- a/contrib/client-side/incremental-update.py
+++ b/contrib/client-side/incremental-update.py
@@ -158,15 +158,15 @@ def main():
 args = ' '.join(sys.argv[1:-1] + ['--non-interactive'])
 print "Fetch HEAD revision...",
 head_revision = get_head_revision(path, args)
- print "done."
- print "Updating to revision %d" % (head_revision)
+ print("done.")
+ print("Updating to revision %d" % (head_revision))
 print "Harvesting the list of subdirectories...",
 dirs = harvest_dirs(path)
- print "done."
+ print("done.")
 dirs.sort(compare_paths)
 dirs.reverse()
- print "Update the tree, one subdirectory at a time. This could take " \
- "a while."
+ print("Update the tree, one subdirectory at a time. This could take " \
+ "a while.")
 num_dirs = len(dirs)
 width = len(str(num_dirs))
 format_string = '[%%%dd/%%%dd] Updating %%s' % (width, width)
diff --git a/contrib/client-side/svn_export_empty_files.py b/contrib/client-side/svn_export_empty_files.py
index 6e2c9cd455015..d75009b1fd965 100755
--- a/contrib/client-side/svn_export_empty_files.py
+++ b/contrib/client-side/svn_export_empty_files.py
@@ -71,7 +71,7 @@ def check_url_for_export(ctx, url, revision, client_ctx):
 # is something wrong with the given URL.
 try:
 if ctx.verbose:
- print "Trying to list '%s'" % url
+ print("Trying to list '%s'" % url)
 svn.client.ls(url, revision, 0, client_ctx)
 # Given a URL, the ls command does not tell you if
@@ -82,7 +82,7 @@ def check_url_for_export(ctx, url, revision, client_ctx):
 try:
 last_slash_index = url.rindex('/')
 except ValueError:
- print "Cannot find a / in the URL '%s'" % url
+ print("Cannot find a / in the URL '%s'" % url)
 return False
 parent_url = url[:last_slash_index]
@@ -90,31 +90,31 @@ def check_url_for_export(ctx, url, revision, client_ctx):
 try:
 if ctx.verbose:
- print "Trying to list '%s'" % parent_url
+ print("Trying to list '%s'" % parent_url)
 remote_ls = svn.client.ls(parent_url, revision, 0, client_ctx)
 except svn.core.SubversionException:
 if ctx.verbose:
- print "Listing of '%s' failed, assuming URL is top of repos" \
- % parent_url
+ print("Listing of '%s' failed, assuming URL is top of repos" \
+ % parent_url)
 return True
 try:
 path_info = remote_ls[path_name]
 except ValueError:
- print "Able to ls '%s' but '%s' not in ls of '%s'" \
- % (url, path_name, parent_url)
+ print("Able to ls '%s' but '%s' not in ls of '%s'" \
+ % (url, path_name, parent_url))
 return False
 if svn.core.svn_node_dir != path_info.kind:
 if ctx.verbose:
- print "The URL '%s' is not a directory" % url
+ print("The URL '%s' is not a directory" % url)
 return False
 else:
 if ctx.verbose:
- print "The URL '%s' is a directory" % url
+ print("The URL '%s' is a directory" % url)
 return True
 finally:
 pass
@@ -154,16 +154,16 @@ def synchronize_dir(ctx, url, dir_name, revision, client_ctx):
 msg = ("'%s' which is a local non-directory but remotely a " + "directory") % dir_name
 if ctx.delete_local_paths:
- print "Removing", msg
+ print("Removing", msg)
 os.unlink(dir_name)
 local_path_kind = LOCAL_PATH_NONE
 else:
- print "Need to remove", msg
+ print("Need to remove", msg)
 ctx.delete_needed = True
 return False
 if LOCAL_PATH_NONE == local_path_kind:
- print "Creating directory '%s'" % dir_name
+ print("Creating directory '%s'" % dir_name)
 os.mkdir(dir_name)
 remote_ls = svn.client.ls(url,
@@ -172,7 +172,7 @@ def synchronize_dir(ctx, url, dir_name, revision, client_ctx):
 client_ctx)
 if ctx.verbose:
- print "Syncing '%s' to '%s'" % (url, dir_name)
+ print("Syncing '%s' to '%s'" % (url, dir_name))
 remote_pathnames = remote_ls.keys()
 remote_pathnames.sort()
@@ -191,7 +191,7 @@ def synchronize_dir(ctx, url, dir_name, revision, client_ctx):
 if remote_pathname in ctx.ignore_names or \
 full_remote_pathname in ctx.ignore_paths:
- print "Skipping '%s'" % full_remote_pathname
+ print("Skipping '%s'" % full_remote_pathname)
 continue
 # Get the remote path kind.
@@ -221,16 +221,16 @@ def synchronize_dir(ctx, url, dir_name, revision, client_ctx):
 msg = ("'%s' which is a local directory but remotely a " + "non-directory") % full_remote_pathname
 if ctx.delete_local_paths:
- print "Removing", msg
+ print("Removing", msg)
 recursive_delete(full_remote_pathname)
 local_path_kind = LOCAL_PATH_NONE
 else:
- print "Need to remove", msg
+ print("Need to remove", msg)
 ctx.delete_needed = True
 continue
 if LOCAL_PATH_NONE == local_path_kind:
- print "Creating file '%s'" % full_remote_pathname
+ print("Creating file '%s'" % full_remote_pathname)
 f = file(full_remote_pathname, 'w')
 f.close()
@@ -240,17 +240,17 @@ def synchronize_dir(ctx, url, dir_name, revision, client_ctx):
 full_local_pathname = os.path.join(dir_name, local_pathname)
 if os.path.isdir(full_local_pathname):
 if ctx.delete_local_paths:
- print "Removing directory '%s'" % full_local_pathname
+ print("Removing directory '%s'" % full_local_pathname)
 recursive_delete(full_local_pathname)
 else:
- print "Need to remove directory '%s'" % full_local_pathname
+ print("Need to remove directory '%s'" % full_local_pathname)
 ctx.delete_needed = True
 else:
 if ctx.delete_local_paths:
- print "Removing file '%s'" % full_local_pathname
+ print("Removing file '%s'" % full_local_pathname)
 os.unlink(full_local_pathname)
 else:
- print "Need to remove file '%s'" % full_local_pathname
+ print("Need to remove file '%s'" % full_local_pathname)
 ctx.delete_needed = True
 return status
@@ -293,14 +293,14 @@ def main(ctx, url, export_pathname):
 client_ctx)
 if ctx.delete_needed:
- print "There are files and directories in the local filesystem"
- print "that do not exist in the Subversion repository that were"
+ print("There are files and directories in the local filesystem")
+ print("that do not exist in the Subversion repository that were")
 print "not deleted. ",
 if ctx.delete_needed:
- print "Please pass the --delete command line option"
- print "to have this script delete those files and directories."
+ print("Please pass the --delete command line option") + print("to have this script delete those files and directories.") else: - print "" + print("") if status: return 0 diff --git a/contrib/client-side/svnmerge/svnmerge.py b/contrib/client-side/svnmerge/svnmerge.py index 365f7cdbf9ae4..60ca4da4762c2 100755 --- a/contrib/client-side/svnmerge/svnmerge.py +++ b/contrib/client-side/svnmerge/svnmerge.py @@ -212,7 +212,7 @@ def error(s): def report(s): """Subroutine to output progress message, unless in quiet mode.""" if opts["verbose"]: - print "%s: %s" % (NAME, s) + print("%s: %s" % (NAME, s)) def prefix_lines(prefix, lines): """Given a string representing one or more lines of text, insert the @@ -1312,10 +1312,10 @@ def display_revisions(revs, display_style, revisions_msg, source_url): for start, end in revs.normalized(): print if start == end: - print "%s: changes in revision %d follow" % (NAME, start) + print("%s: changes in revision %d follow" % (NAME, start)) else: - print "%s: changes in revisions %d-%d follow" % (NAME, - start, end) + print("%s: changes in revisions %d-%d follow" % (NAME, + start, end)) print # Note: the starting revision number to 'svn diff' is @@ -1878,7 +1878,7 @@ def _print_wrapped(self, text, indent=0): text = self._paragraphs(text, self.cwidth - indent) print text.pop(0) for t in text: - print " " * indent + t + print(" " * indent + t) def _find_common(self, fl): for o in self.copts: @@ -1988,12 +1988,12 @@ def error(self, s, cmd=None): self.print_small_help() sys.exit(1) def print_small_help(self): - print "Type '%s help' for usage" % self.progname + print("Type '%s help' for usage" % self.progname) def print_usage_line(self): - print "usage: %s [options...] [args...]\n" % self.progname + print("usage: %s [options...] [args...]\n" % self.progname) def print_command_list(self): - print "Available commands (use '%s help COMMAND' for more details):\n" \ - % self.progname + print("Available commands (use '%s help COMMAND' for more details):\n" \ + % self.progname) cmds = self.ctable.keys() cmds.sort() indent = max(map(len, cmds)) @@ -2003,7 +2003,7 @@ def print_command_list(self): self._print_wrapped(h, indent+6) def print_command_help(self, cmd): cmd = self.ctable[str(cmd)] - print 'usage: %s %s\n' % (self.progname, cmd.usage) + print('usage: %s %s\n' % (self.progname, cmd.usage)) self._print_wrapped(cmd.help) def print_opts(opts, self=self): if not opts: return @@ -2012,9 +2012,9 @@ def print_opts(opts, self=self): for f,o in zip(flags, opts): print " %-*s :" % (indent, f), self._print_wrapped(o.help, indent+5) - print '\nCommand options:' + print('\nCommand options:') print_opts(cmd.opts) - print '\nGlobal options:' + print('\nGlobal options:') print_opts(self.gopts) def print_version(self): @@ -2374,5 +2374,5 @@ def main(args): error(err_msg) except KeyboardInterrupt: # Avoid traceback on CTRL+C - print "aborted by user" + print("aborted by user") sys.exit(1) diff --git a/contrib/hook-scripts/hook_toolbox.py b/contrib/hook-scripts/hook_toolbox.py index c8e2a0389e44c..32d5762da812b 100644 --- a/contrib/hook-scripts/hook_toolbox.py +++ b/contrib/hook-scripts/hook_toolbox.py @@ -99,7 +99,7 @@ def read_config(repos, filename, expected_tokens_per_line=-1): ''' path = os.path.join(repos, 'conf', filename) if not os.path.exists(path): - print 'Not present:', path + print('Not present:', path) return [] config_lines = open(path).readlines() @@ -114,16 +114,16 @@ def read_config(repos, filename, expected_tokens_per_line=-1): if len(tokens) == expected_tokens_per_line ] 
 if len(matching_lines) < len(tokenized_lines):
- print '*** %d syntax errors in %s' % (
+ print('*** %d syntax errors in %s' % (
 len(tokenized_lines) - len(matching_lines),
- path)
+ path))
 return matching_lines
 def update_working_copy(wc_path):
 if not os.path.exists(wc_path):
- print '--> *** Cannot find working copy', wc_path
+ print('--> *** Cannot find working copy', wc_path)
 return None
 return run(os.path.join(BIN_PATH, 'svn'), 'update', wc_path)
@@ -131,7 +131,7 @@ def update_working_copy(wc_path):
 def run(*cmd):
 '''Call the given command & args and return what it printed to stdout. e.g. result = run('/usr/bin/svn', 'info', wc_dir_path) '''
- print '-->', ' '.join(cmd)
+ print('-->', ' '.join(cmd))
 stdout = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
 print stdout.strip()
 return stdout
diff --git a/contrib/hook-scripts/remove-zombie-locks.py b/contrib/hook-scripts/remove-zombie-locks.py
index dda57c3e06e13..22b22e7323e76 100755
--- a/contrib/hook-scripts/remove-zombie-locks.py
+++ b/contrib/hook-scripts/remove-zombie-locks.py
@@ -120,8 +120,8 @@ def run(self):
 """iterate over every locked file in repo_path/repo_subpath, calling unlock_nonexistent_files for each"""
- print "Removing all zombie locks from repository at %s\n" \
- "This may take several minutes..." % self.repos_path
+ print("Removing all zombie locks from repository at %s\n" \
+ "This may take several minutes..." % self.repos_path)
 # Try to use svn_fs_get_locks2() if it's present, as it's believed
 # to be problem-free.
@@ -151,7 +151,7 @@ def bdb_lock_callback(lock, callback_pool):
 else:
 svn.fs.svn_fs_get_locks(self.fs_ptr, self.repos_subpath, self.unlock_nonexistent_files, self.pool)
- print "Done."
+ print("Done.")
 class RevisionZombieLockRemover:
diff --git a/contrib/server-side/add-needs-lock.py b/contrib/server-side/add-needs-lock.py
index 3ab3f514797a9..0bad8b59a2221 100755
--- a/contrib/server-side/add-needs-lock.py
+++ b/contrib/server-side/add-needs-lock.py
@@ -121,7 +121,7 @@ def addneedslock(repos_path, uname='', commitmsg='', included='.*', excluded='^$
 interesting_files = []
- print 'Searching ' + str(len(files)) + ' file(s)...'
+ print('Searching ' + str(len(files)) + ' file(s)...')
 for path in files:
 locked_val = svn.fs.get_lock(fsob, path)
@@ -135,7 +135,7 @@ def addneedslock(repos_path, uname='', commitmsg='', included='.*', excluded='^$
 if interesting_files:
 if dryrun:
 for path in interesting_files:
- print "Need to add svn:needs-lock to '" + path + "'"
+ print("Need to add svn:needs-lock to '" + path + "'")
 else:
 # open a transaction against HEAD
 headrev = svn.fs.youngest_rev(fsob)
@@ -143,21 +143,21 @@ def addneedslock(repos_path, uname='', commitmsg='', included='.*', excluded='^$
 root = svn.fs.txn_root(txn)
 for path in interesting_files:
- print "Adding svn:needs-lock to '" + path + "'..."
+ print("Adding svn:needs-lock to '" + path + "'...")
 svn.fs.change_node_prop(root, path, svn.core.SVN_PROP_NEEDS_LOCK, '*')
 conflict, newrev = svn.fs.commit_txn(txn)
 if conflict:
 raise Exception("Conflict encountered (%s)" % conflict)
- print 'Created revision: ', newrev
+ print('Created revision: ', newrev)
 else:
- print 'Nothing changed. Current Revision: ', headrev
+ print('Nothing changed. Current Revision: ', headrev)
 ################################################################################
 def usage():
- print "USAGE: add-needs-lock.py [-u username] [-m commitmsg] [-i includeregexp] [-e excluderegexp] [-r REV] [-d] REPOS-PATH"
+ print("USAGE: add-needs-lock.py [-u username] [-m commitmsg] [-i includeregexp] [-e excluderegexp] [-r REV] [-d] REPOS-PATH")
 sys.exit(1)
@@ -184,12 +184,12 @@ def main():
 if name == '-r':
 rev = int(value)
 if name == '-d':
- print 'Performing dry run...'
+ print('Performing dry run...')
 dryrun = 1
 if rev is None:
- print 'Searching all files...'
+ print('Searching all files...')
 else:
- print 'Searching revision: ' + str(rev) + '...'
+ print('Searching revision: ' + str(rev) + '...')
 if len(args) == 1:
 addneedslock(args[0], uname, commitmsg, included, excluded, rev, dryrun)
 else:
diff --git a/contrib/server-side/fsfsfixer/fixer/find_good_id.py b/contrib/server-side/fsfsfixer/fixer/find_good_id.py
index 5328f64b54a84..31981767ebf77 100755
--- a/contrib/server-side/fsfsfixer/fixer/find_good_id.py
+++ b/contrib/server-side/fsfsfixer/fixer/find_good_id.py
@@ -101,7 +101,7 @@ def find_good_rep_header(repo_dir, rev, size):
 repo_dir = sys.argv[1]
 rev = sys.argv[2]
 size = sys.argv[3]
- print "Good offset:", find_good_rep_header(repo_dir, rev, size)
+ print("Good offset:", find_good_rep_header(repo_dir, rev, size))
 sys.exit(0)
 if len(sys.argv) != 3:
diff --git a/contrib/server-side/fsfsfixer/fixer/fix-rev.py b/contrib/server-side/fsfsfixer/fixer/fix-rev.py
index aab96e7f67faf..9d2b20157eb3c 100755
--- a/contrib/server-side/fsfsfixer/fixer/fix-rev.py
+++ b/contrib/server-side/fsfsfixer/fixer/fix-rev.py
@@ -101,7 +101,7 @@ def fix_id(repo_dir, rev, bad_id):
 "good id '" + good_id + "'")
 replace_in_rev_file(repo_dir, rev, bad_id, good_id)
- print "Fixed id: " + bad_id + " -> " + good_id
+ print("Fixed id: " + bad_id + " -> " + good_id)
 fixed_ids[bad_id] = good_id
 def fix_checksum(repo_dir, rev, old_checksum, new_checksum):
@@ -112,7 +112,7 @@ def fix_checksum(repo_dir, rev, old_checksum, new_checksum):
 assert old_checksum != new_checksum
 replace_in_rev_file(repo_dir, rev, old_checksum, new_checksum)
- print "Fixed checksum: " + old_checksum + " -> " + new_checksum
+ print("Fixed checksum: " + old_checksum + " -> " + new_checksum)
 fixed_checksums[old_checksum] = new_checksum
 def fix_rep_ref(repo_dir, rev, prefix, rep_rev, bad_offset, rep_size):
@@ -127,7 +127,7 @@ def fix_rep_ref(repo_dir, rev, prefix, rep_rev, bad_offset, rep_size):
 if good_offset == bad_offset:
 raise FixError("Attempting to fix a rep ref that appears to be correct: " + old_line)
 replace_in_rev_file(repo_dir, rev, old_line, new_line)
- print "Fixed rep ref:", old_line, "->", new_line
+ print("Fixed rep ref:", old_line, "->", new_line)
 def handle_one_error(repo_dir, rev, error_lines):
@@ -227,7 +227,7 @@ def fix_one_error(repo_dir, rev):
 else:
 verbose_print("Unrecognized error message; trying 'svnlook' instead.")
 except FixError as e:
- print 'warning:', e
+ print('warning:', e)
 verbose_print("Trying 'svnlook' instead.")
 # At this point, we've got an 'svnadmin' error that we don't know how to
@@ -238,7 +238,7 @@ def fix_one_error(repo_dir, rev):
 svnlook_err = grab_stderr([SVNLOOK, 'tree', '-r'+rev, repo_dir])
 if svnlook_err == []:
- print 'warning: svnlook did not find an error'
+ print('warning: svnlook did not find an error')
 else:
 if handle_one_error(repo_dir, rev, svnlook_err):
 return True
@@ -281,7 +281,7 @@ def fix_rev(repo_dir, rev):
 # Keep looking for verification errors in r$REV and fixing them while we can.
 while fix_one_error(repo_dir, rev): pass
- print "Revision " + rev + " verifies OK."
+ print("Revision " + rev + " verifies OK.")
 if __name__ == '__main__':
@@ -296,5 +296,5 @@ def fix_rev(repo_dir, rev):
 try:
 fix_rev(repo_dir, rev)
 except FixError as e:
- print 'error:', e
+ print('error:', e)
 sys.exit(1)
diff --git a/contrib/server-side/fsfsverify.py b/contrib/server-side/fsfsverify.py
index 3e42979a9d64f..a1667794f19c9 100755
--- a/contrib/server-side/fsfsverify.py
+++ b/contrib/server-side/fsfsverify.py
@@ -765,7 +765,7 @@ def __init__(self, f, currentRev):
 if nodeId.rev != self.id.rev:
 if not os.path.exists(str(nodeId.rev)):
- print "Can't check %s" % repr(nodeId)
+ print("Can't check %s" % repr(nodeId))
 continue
 tmp = open(str(nodeId.rev),'rb')
 tmp.seek(nodeId.offset)
@@ -785,8 +785,8 @@ def __init__(self, f, currentRev):
 f.seek(offset)
 else:
 # The directory entries are stored in another file.
- print "Warning: dir entries are stored in rev %d for noderev %s" % (
- self.text.rev, repr(self.id))
+ print("Warning: dir entries are stored in rev %d for noderev %s" % (
+ self.text.rev, repr(self.id)))
 def __repr__(self):
 str = 'NodeRev Id: %s\n type: %s\n' % (repr(self.id), repr(self.type))
@@ -858,18 +858,18 @@ def getRootAndChangedPaths(revFile):
 def dumpChangedPaths(changedPaths):
- print "Changed Path Information:"
+ print("Changed Path Information:")
 for (path, (id, action, textMod, propMod, copyfromRev, copyfromPath)) in changedPaths:
- print " %s:" % path
- print " id: %s" % id
- print " action: %s" % action
- print " text mod: %s" % textMod
- print " prop mod: %s" % propMod
+ print(" %s:" % path)
+ print(" id: %s" % id)
+ print(" action: %s" % action)
+ print(" text mod: %s" % textMod)
+ print(" prop mod: %s" % propMod)
 if copyfromRev != -1:
- print " copyfrom path: %s" % copyfromPath
- print " copyfrom rev: %s" % copyfromRev
+ print(" copyfrom path: %s" % copyfromPath)
+ print(" copyfrom rev: %s" % copyfromRev)
 print
@@ -944,7 +944,7 @@ def verify(noderev, revFile, dumpInstructions, dumpWindows):
 def truncate(noderev, revFile):
 txnId = noderev.id
- print "Truncating node %s (%s)" % (txnId, noderev.cpath)
+ print("Truncating node %s (%s)" % (txnId, noderev.cpath))
 # Grab the text rep
 textRep = noderev.text
@@ -980,7 +980,7 @@ def truncate(noderev, revFile):
 newTextRep = ' '.join(fields) + '\x0a'
 assert(len(newTextRep) == overallLength)
 revFile.write(newTextRep)
- print "Done."
+ print("Done.")
 sys.exit(0)
@@ -1003,7 +1003,7 @@ def fixHeader(e, revFile):
 headerLen = len(line)
 offset = offset + len(line)
- print "Original text rep located at", originalOffset
+ print("Original text rep located at", originalOffset)
 # Okay, now we have the original offset of the text rep that was
 # in the process of being written out. The header portion of the
@@ -1014,14 +1014,14 @@ def fixHeader(e, revFile):
 revFile.seek(originalOffset)
 block = revFile.read(copyLen)
- print "Copy %d bytes from offset %d" % (copyLen, originalOffset)
+ print("Copy %d bytes from offset %d" % (copyLen, originalOffset))
- print "Write %d bytes at offset %d" % (copyLen, e.offset)
+ print("Write %d bytes at offset %d" % (copyLen, e.offset))
 revFile.seek(e.offset)
 revFile.write(block)
 revFile.flush()
- print "Fixed? :-) Re-run fsfsverify without the -f option"
+ print("Fixed? :-) Re-run fsfsverify without the -f option")
 def fixStream(e, revFile):
@@ -1050,8 +1050,8 @@ def fixStream(e, revFile):
 srcOffset = errorOffset
 destOffset = repeatedBlockOffset
- print "Copy %d bytes from offset %d" % (srcLength, srcOffset)
- print "Write %d bytes at offset %d" % (srcLength, destOffset)
+ print("Copy %d bytes from offset %d" % (srcLength, srcOffset))
+ print("Write %d bytes at offset %d" % (srcLength, destOffset))
 while srcOffset < finalOffset:
 blen = 64*1024
@@ -1068,7 +1068,7 @@ def fixStream(e, revFile):
 revFile.flush()
 revFile.close()
- print "Fixed? :-) Re-run fsfsverify without the -f option"
+ print("Fixed? :-) Re-run fsfsverify without the -f option")
 def checkOptions(options):
diff --git a/contrib/server-side/svn-tweak-author.py b/contrib/server-side/svn-tweak-author.py
index cbd185c29ad44..d8b5306a8d01b 100755
--- a/contrib/server-side/svn-tweak-author.py
+++ b/contrib/server-side/svn-tweak-author.py
@@ -74,9 +74,9 @@ def tweak_rev_author(fs_obj, revision, author):
 fs.svn_fs_change_rev_prop(fs_obj, revision, core.SVN_PROP_REVISION_AUTHOR, author)
 except:
- print ""
+ print()
 raise
- print "done."
+ print("done.")
 def get_fs_obj(repos_path):
 """Return a repository filesystem object for the repository
diff --git a/notes/directory-index/dirindex.py b/notes/directory-index/dirindex.py
index b143e75efc270..87cbefdce5fe4 100644
--- a/notes/directory-index/dirindex.py
+++ b/notes/directory-index/dirindex.py
@@ -754,13 +754,13 @@ def __init__(self, index):
 }
 def greektree(ix, tx):
 def populate(track, items):
- print 'Populating', track
+ print('Populating', track)
 for name, kind in items.iteritems():
 if kind == 'file':
 node = ix.add(tx, track, name, NodeRev.FILE)
 else:
 node = ix.add(tx, track, name, NodeRev.DIR)
- print 'Added', node, 'node:', node.noderev
+ print('Added', node, 'node:', node.noderev)
 if isinstance(kind, dict):
 populate(node, kind)
@@ -773,16 +773,16 @@ def simpletest(database):
 ix.initialize()
 try:
- print "Lookup root"
+ print("Lookup root")
 tx = ix.get_txn()
- print "transaction:", tx
+ print("transaction:", tx)
 root = ix.lookup(tx)
- print "root track:", root
- print "root noderev", root.noderev
+ print("root track:", root)
+ print("root noderev", root.noderev)
- print 'Create greek tree'
+ print('Create greek tree')
 tx = ix.new_txn(0)
- print "transaction:", tx
+ print("transaction:", tx)
 greektree(ix, tx)
 ix.commit_txn(tx, 1)
 ix.commit()
@@ -794,20 +794,20 @@ def listdir(noderev, prefix):
 if n._isdir:
 listdir(n, prefix + " ")
- print "List contents"
+ print("List contents")
 tx = ix.get_txn()
- print "transaction:", tx
+ print("transaction:", tx)
 root = ix.lookup(tx)
 print str(root.noderev)
 listdir(root.noderev, " ")
- print "Lookup iota"
+ print("Lookup iota")
 track = ix.lookup(tx, None, "iota")
- print str(track), str(track.noderev)
+ print(str(track), str(track.noderev))
- print "Lookup A/D/H/psi"
+ print("Lookup A/D/H/psi")
 track = ix.lookup(tx, None, "A/D/H/psi")
- print str(track), str(track.noderev)
+ print(str(track), str(track.noderev))
 finally:
 ix.close()
diff --git a/notes/move-tracking/path_pairs_to_eid_map.py b/notes/move-tracking/path_pairs_to_eid_map.py
index c5e2034b5a961..2814481fc27d2 100755
--- a/notes/move-tracking/path_pairs_to_eid_map.py
+++ b/notes/move-tracking/path_pairs_to_eid_map.py
@@ -256,5 +256,5 @@ def write_parent_eid(mapping, side, eid):
 for eid in converter.peid_loc_pairs():
 relpath0 = converter.peid_locs_for_side(0).relpath_from_eid(eid)
 relpath1 = converter.peid_locs_for_side(1).relpath_from_eid(eid)
- print "%3d %-12s %-12s" % (eid, relpath0, relpath1)
+ print("%3d %-12s %-12s" % (eid, relpath0, relpath1))
diff --git a/subversion/tests/cmdline/svntest/main.py b/subversion/tests/cmdline/svntest/main.py
index 7f6454a1aec5f..166c4ccec740d 100644
--- a/subversion/tests/cmdline/svntest/main.py
+++ b/subversion/tests/cmdline/svntest/main.py
@@ -2178,8 +2178,8 @@ def get_issue_details(issue_numbers):
 # Parse the xml for ISSUE_NO from the issue tracker into a Document.
 issue_xml_f = urllib.urlopen(xml_url)
 except:
- print "WARNING: Unable to contact issue tracker; " \
- "milestones defaulting to 'unknown'."
+ print("WARNING: Unable to contact issue tracker; " \
+ "milestones defaulting to 'unknown'.")
 return issue_dict
 try:
@@ -2198,7 +2198,7 @@ def get_issue_details(issue_numbers):
 assignment = assignment_element[0].childNodes[0].nodeValue
 issue_dict[issue_id] = [milestone, assignment]
 except:
- print "ERROR: Unable to parse target milestones from issue tracker"
+ print("ERROR: Unable to parse target milestones from issue tracker")
 raise
 return issue_dict
diff --git a/subversion/tests/cmdline/svntest/verify.py b/subversion/tests/cmdline/svntest/verify.py
index e643f116235c7..18623b59b22a3 100644
--- a/subversion/tests/cmdline/svntest/verify.py
+++ b/subversion/tests/cmdline/svntest/verify.py
@@ -765,8 +765,8 @@ def compare_dump_files(message, label, expected, actual,
 action_record['blanks'] = 0
 if parsed_expected != parsed_actual:
- print 'DIFF of raw dumpfiles (including expected differences)'
- print ''.join(ndiff(expected, actual))
+ print('DIFF of raw dumpfiles (including expected differences)')
+ print(''.join(ndiff(expected, actual)))
 raise svntest.Failure('DIFF of parsed dumpfiles (ignoring expected differences)\n' + '\n'.join(ndiff( pprint.pformat(parsed_expected).splitlines(),
diff --git a/subversion/tests/manual/tree-conflicts-add-vs-add.py b/subversion/tests/manual/tree-conflicts-add-vs-add.py
index 2943cd56ef822..8ba39f282e3a8 100755
--- a/subversion/tests/manual/tree-conflicts-add-vs-add.py
+++ b/subversion/tests/manual/tree-conflicts-add-vs-add.py
@@ -39,9 +39,9 @@ def run_cmd(cmd, verbose=True, shell=False):
 if verbose:
 if shell:
- print '\n---', cmd
+ print('\n---', cmd)
 else:
- print '\n---', ' '.join(cmd)
+ print('\n---', ' '.join(cmd))
 p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=shell)
 stdout,stderr = p.communicate()[0:2]
 if verbose and stdout:
@@ -406,7 +406,7 @@ def analyze(name, outs):
 results.append( (name, analyze( name, test_func( name, *row[1:] ) )) )
 except:
 if name:
- print 'Error during', name
+ print('Error during', name)
 raise
 finally:
 lines = []
diff --git a/tools/client-side/mergeinfo-sanitizer.py b/tools/client-side/mergeinfo-sanitizer.py
index e3201c676681d..8411dfa286af1 100755
--- a/tools/client-side/mergeinfo-sanitizer.py
+++ b/tools/client-side/mergeinfo-sanitizer.py
@@ -106,14 +106,14 @@ def location_segment_callback(segment, pool):
 # This function does the authentication in an interactive way
 ##
 def prompt_func_ssl_unknown_cert(realm, failures, cert_info, may_save, pool):
- print "The certificate details are as follows:"
- print "--------------------------------------"
- print "Issuer : " + str(cert_info.issuer_dname)
- print "Hostname : " + str(cert_info.hostname)
- print "ValidFrom : " + str(cert_info.valid_from)
- print "ValidUpto : " + str(cert_info.valid_until)
- print "Fingerprint: " + str(cert_info.fingerprint)
- print ""
+ print("The certificate details are as follows:")
+ print("--------------------------------------")
+ print("Issuer : " + str(cert_info.issuer_dname))
+ print("Hostname : " + str(cert_info.hostname))
+ print("ValidFrom : " + str(cert_info.valid_from))
+ print("ValidUpto : " + str(cert_info.valid_until))
+ print("Fingerprint: " + str(cert_info.fingerprint))
+ print("")
 ssl_trust = core.svn_auth_cred_ssl_server_trust_t()
 if may_save:
 choice = raw_input( "accept (t)temporarily (p)permanently: ")
@@ -182,13 +182,13 @@ def sanitize_mergeinfo(parsed_original_mergeinfo, repo_root, wcpath, mergeinfo,
 1, temp_pool)
 #There should be no mergeinfo added by our population. There should only
 #be deletion of mergeinfo. so take it from diff_mergeinfo[0]
- print "The bogus mergeinfo summary:"
+ print("The bogus mergeinfo summary:")
 bogus_mergeinfo_deleted = diff_mergeinfo[0]
 for bogus_mergeinfo_path in bogus_mergeinfo_deleted:
 sys.stdout.write(bogus_mergeinfo_path + ": ")
 for revision_range in bogus_mergeinfo_deleted[bogus_mergeinfo_path]:
 sys.stdout.write(str(revision_range.start + 1) + "-" + str(revision_range.end) + ",")
- print ""
+ print()
 ##
 # This function tries to 'propset the new mergeinfo into the working copy.
@@ -233,7 +233,7 @@ def fix_sanitized_mergeinfo(parsed_original_mergeinfo, repo_root, wcpath,
 os.remove(newmergeinfo_file)
 os.remove(hash_file)
 else:
- print "The hashes are not matching. Probable chance of unwanted tweaking in the mergeinfo"
+ print("The hashes are not matching. Probable chance of unwanted tweaking in the mergeinfo")
 ##
@@ -242,8 +242,8 @@ def fix_sanitized_mergeinfo(parsed_original_mergeinfo, repo_root, wcpath,
 def check_local_modifications(wcpath, temp_pool):
 has_local_mod = wc.svn_wc_revision_status(wcpath, None, 0, None, temp_pool)
 if has_local_mod.modified:
- print """The working copy has local modifications. Please revert them or clean
-the working copy before running the script."""
+ print("""The working copy has local modifications. Please revert them or clean
+the working copy before running the script.""")
 sys.exit(1)
 def get_original_mergeinfo(wcpath, revision, depth, ctx, temp_pool):
@@ -313,7 +313,7 @@ def main():
 try:
 main()
 except KeyboardInterrupt:
- print ""
+ print()
 sys.stderr.write("The script is interrupted and stopped manually.")
- print ""
+ print()
diff --git a/tools/dev/benchmarks/RepoPerf/copy_repo.py b/tools/dev/benchmarks/RepoPerf/copy_repo.py
index a95a82d59fc25..4c04b300a1f07 100644
--- a/tools/dev/benchmarks/RepoPerf/copy_repo.py
+++ b/tools/dev/benchmarks/RepoPerf/copy_repo.py
@@ -296,15 +296,15 @@ def copy_repos(src, dst, count, separator_size):
 def show_usage():
 """ Write a simple CL docstring """
- print "Copies and duplicates repositories in a way that mimics larger deployments."
- print
- print "Usage:"
- print "copy_repo.py SRC DST COUNT SEPARATOR_SIZE"
- print
- print "SRC Immediate parent folder of all the repositories to copy."
- print "DST Folder to copy into; current contents will be lost."
- print "COUNT Number of copies to create of each source repository."
- print "SEPARATOR_SIZE Additional spacing, in kBytes, between revisions."
+ print("Copies and duplicates repositories in a way that mimics larger deployments.") + print() + print("Usage:") + print("copy_repo.py SRC DST COUNT SEPARATOR_SIZE") + print() + print("SRC Immediate parent folder of all the repositories to copy.") + print("DST Folder to copy into; current contents will be lost.") + print("COUNT Number of copies to create of each source repository.") + print("SEPARATOR_SIZE Additional spacing, in kBytes, between revisions.") #main function if len(argv) == 5: diff --git a/tools/dev/benchmarks/suite1/benchmark.py b/tools/dev/benchmarks/suite1/benchmark.py index fc61848f7c3ac..aeff594f3c40b 100755 --- a/tools/dev/benchmarks/suite1/benchmark.py +++ b/tools/dev/benchmarks/suite1/benchmark.py @@ -181,7 +181,7 @@ def run_cmd(cmd, stdin=None, shell=False, verbose=False): printable_cmd = cmd else: printable_cmd = ' '.join(cmd) - print 'CMD:', printable_cmd + print('CMD:', printable_cmd) if stdin: stdin_arg = subprocess.PIPE @@ -197,9 +197,9 @@ def run_cmd(cmd, stdin=None, shell=False, verbose=False): if verbose: if (stdout): - print "STDOUT: [[[\n%s]]]" % ''.join(stdout) + print("STDOUT: [[[\n%s]]]" % ''.join(stdout)) if (stderr): - print "STDERR: [[[\n%s]]]" % ''.join(stderr) + print("STDERR: [[[\n%s]]]" % ''.join(stderr)) return stdout, stderr @@ -350,7 +350,7 @@ def ensure_tables_created(self): # exists return - print 'Creating database tables.' + print('Creating database tables.') c.executescript(''' CREATE TABLE batch ( batch_id INTEGER PRIMARY KEY AUTOINCREMENT, @@ -468,7 +468,7 @@ def remember_timing(self, command, seconds): def submit_timings(self): conn = self.batch.db.conn c = conn.cursor() - print 'submitting...' + print('submitting...') c.executemany(""" INSERT INTO timings @@ -629,7 +629,7 @@ def svn(*args): cmd = [ svn_bin ] cmd.extend( list(args) ) if verbose: - print 'svn cmd:', ' '.join(cmd) + print('svn cmd:', ' '.join(cmd)) stdin = None if stdin: @@ -652,9 +652,9 @@ def svn(*args): if verbose: if (stdout): - print "STDOUT: [[[\n%s]]]" % ''.join(stdout) + print("STDOUT: [[[\n%s]]]" % ''.join(stdout)) if (stderr): - print "STDERR: [[[\n%s]]]" % ''.join(stderr) + print("STDERR: [[[\n%s]]]" % ''.join(stderr)) return stdout,stderr @@ -792,9 +792,9 @@ def rmtree_onerror(func, path, exc_info): else: file_url = 'file:///%s' % repos - print '\nRunning svn benchmark in', base - print 'dir levels: %s; new files and dirs per leaf: %s' %( - run_kind.levels, run_kind.spread) + print('\nRunning svn benchmark in', base) + print('dir levels: %s; new files and dirs per leaf: %s' %( + run_kind.levels, run_kind.spread)) started = datetime.datetime.now() @@ -876,7 +876,7 @@ def rmtree_onerror(func, path, exc_info): finally: stopped = datetime.datetime.now() - print '\nDone with svn benchmark in', (stopped - started) + print('\nDone with svn benchmark in', (stopped - started)) run.remember_timing(TOTAL_RUN, timedelta_to_seconds(stopped - started)) @@ -896,8 +896,8 @@ def cmdline_run(db, options, run_kind_str, N=1): N = int(N) - print 'Hi, going to run a Subversion benchmark series of %d runs...' % N - print 'Label is %s' % run_kind.label() + print('Hi, going to run a Subversion benchmark series of %d runs...' % N) + print('Label is %s' % run_kind.label()) # can we run the svn binaries? 
 svn_bin = j(options.svn_bin_dir, 'svn')
@@ -908,12 +908,12 @@ def cmdline_run(db, options, run_kind_str, N=1):
 if not so:
 bail("Can't run %s" % b)
- print ', '.join([s.strip() for s in so.split('\n')[:2]])
+ print(', '.join([s.strip() for s in so.split('\n')[:2]]))
 batch = Batch(db)
 for i in range(N):
- print 'Run %d of %d' % (i + 1, N)
+ print('Run %d of %d' % (i + 1, N))
 perform_run(batch, run_kind, svn_bin, svnadmin_bin, options.verbose)
@@ -934,31 +934,31 @@ def add_if_not_none(name, val):
 add_if_not_none('levels', run_kind.levels)
 add_if_not_none('spread', run_kind.spread)
 if constraints:
- print 'For\n', '\n'.join(constraints)
- print 'I found:'
+ print('For\n', '\n'.join(constraints))
+ print('I found:')
 d = TimingQuery(db, run_kind)
 cmd_names = d.get_sorted_command_names()
 if cmd_names:
- print '\n%d command names:\n ' % len(cmd_names), '\n '.join(cmd_names)
+ print('\n%d command names:\n ' % len(cmd_names), '\n '.join(cmd_names))
 branches = d.get_sorted_branches()
 if branches and (len(branches) > 1 or branches[0] != run_kind.branch):
- print '\n%d branches:\n ' % len(branches), '\n '.join(branches)
+ print('\n%d branches:\n ' % len(branches), '\n '.join(branches))
 revisions = d.get_sorted_revisions()
 if revisions and (len(revisions) > 1 or revisions[0] != run_kind.revision):
- print '\n%d revisions:\n ' % len(revisions), '\n '.join(revisions)
+ print('\n%d revisions:\n ' % len(revisions), '\n '.join(revisions))
 levels_spread = d.get_sorted_levels_spread()
 if levels_spread and ( len(levels_spread) > 1 or levels_spread[0] != (run_kind.levels, run_kind.spread)):
- print '\n%d kinds of levels x spread:\n ' % len(levels_spread), '\n '.join(
- [ ('%dx%d' % (l, s)) for l,s in levels_spread ])
+ print('\n%d kinds of levels x spread:\n ' % len(levels_spread), '\n '.join(
+ [ ('%dx%d' % (l, s)) for l,s in levels_spread ]))
- print "\n%d runs in %d batches.\n" % (d.count_runs_batches())
+ print("\n%d runs in %d batches.\n" % (d.count_runs_batches()))
 def cmdline_show(db, options, *run_kind_strings):
@@ -983,7 +983,7 @@ def cmdline_show(db, options, *run_kind_strings):
 tavg, command_name))
- print '\n'.join(s)
+ print('\n'.join(s))
 def cmdline_compare(db, options, *args):
@@ -1004,7 +1004,7 @@ def cmdline_compare(db, options, *args):
 rightq = TimingQuery(db, right_kind)
 right = rightq.get_timings()
 if not right:
- print "No timings for %s" % right_kind.label()
+ print("No timings for %s" % right_kind.label())
 continue
 label = 'Compare %s to %s' % (right_kind.label(), left_kind.label())
@@ -1051,7 +1051,7 @@ def cmdline_compare(db, options, *args):
 ])
- print '\n'.join(s)
+ print('\n'.join(s))
 # ------------------------------------------------------- charts
@@ -1073,7 +1073,7 @@ def cmdline_chart_compare(db, options, *args):
 query = TimingQuery(db, run_kind)
 timings = query.get_timings()
 if not timings:
- print "No timings for %s" % run_kind.label()
+ print("No timings for %s" % run_kind.label())
 continue
 labels.append(run_kind.label())
 timing_sets.append(timings)
@@ -1215,7 +1215,7 @@ def cmdline_chart_compare(db, options, *args):
 va='center', weight='bold')
 plt.savefig(chart_path)
- print 'wrote chart file:', chart_path
+ print('wrote chart file:', chart_path)
 # ------------------------------------------------------------ main
diff --git a/tools/dev/gen-py-errors.py b/tools/dev/gen-py-errors.py
index 9ca0365f7c470..a2d7afb390ec3 100755
--- a/tools/dev/gen-py-errors.py
+++ b/tools/dev/gen-py-errors.py
@@ -56,7 +56,7 @@ def write_output(codes):
 for name, value in codes:
 # skip SVN_ERR_ on the name
- print '%s = %d' % (name[8:], value)
+ print('%s = %d' % (name[8:], value))
 def main(codes_fname):
diff --git a/tools/dev/histogram.py b/tools/dev/histogram.py
index 4b977fa91e7bc..1923c78b57c38 100755
--- a/tools/dev/histogram.py
+++ b/tools/dev/histogram.py
@@ -42,7 +42,7 @@ def histogram(counts, width):
 for author, count in sorted(counts.items(), key=operator.itemgetter(1), # sort on count
 reverse=True):
- print "%-*s | %s" % (max_len, author, "X"*int(count/adjustor))
+ print("%-*s | %s" % (max_len, author, "X"*int(count/adjustor)))
 if __name__ == '__main__':
diff --git a/tools/dev/sbox-ospath.py b/tools/dev/sbox-ospath.py
index e510cd5f83aae..9e387551d1272 100755
--- a/tools/dev/sbox-ospath.py
+++ b/tools/dev/sbox-ospath.py
@@ -54,10 +54,10 @@ def rewrite_file(fname):
 lines[i] = line[:start] + 'sbox.ospath(' + parts + ')' + line[end:]
 count += 1
 if count == 0:
- print 'No changes.'
+ print('No changes.')
 else:
 open(fname, 'w').writelines(lines)
- print '%s rewrites performed.' % (count,)
+ print('%s rewrites performed.' % (count,))
 if __name__ == '__main__':
diff --git a/tools/dev/wc-format.py b/tools/dev/wc-format.py
index fc6ef0789d910..3ecfad0622c1b 100755
--- a/tools/dev/wc-format.py
+++ b/tools/dev/wc-format.py
@@ -53,7 +53,7 @@ def print_format(wc_path):
 # 1.6.x: format 10
 # 1.7.x: format 29
 formatno = get_format(wc_path)
- print '%s: %s' % (wc_path, formatno)
+ print('%s: %s' % (wc_path, formatno))
 if __name__ == '__main__':
diff --git a/tools/dev/wc-ng/bump-to-19.py b/tools/dev/wc-ng/bump-to-19.py
index 31b05e838d828..95db4c58cc32f 100755
--- a/tools/dev/wc-ng/bump-to-19.py
+++ b/tools/dev/wc-ng/bump-to-19.py
@@ -254,7 +254,7 @@ def migrate_wc_subdirs(wc_root_path):
 dirs.remove(dot_svn)
 except ValueError:
 # a non-WC dir: don't walk into any subdirectories
- print "skipped: ", NotASubversionWC(dir_path)
+ print("skipped: ", NotASubversionWC(dir_path))
 del dirs[:]
 continue
@@ -265,18 +265,18 @@ def migrate_wc_subdirs(wc_root_path):
 wc_subdir_path = wc_subdir_path[2:]
 if not select_subdir(wc_subdir_path):
- print "skipped:", wc_subdir_path
+ print("skipped:", wc_subdir_path)
 dirs.remove(dir)
 continue
 try:
 check_wc_format_number(wc_subdir_path)
- print "migrating '" + wc_subdir_path + "'"
+ print("migrating '" + wc_subdir_path + "'")
 copy_db_rows_to_wcroot(wc_subdir_path)
 move_and_shard_pristine_files(wc_subdir_path, '.')
 migrated_subdirs += [wc_subdir_path]
 except (WrongFormatException, NotASubversionWC) as e:
- print "skipped:", e
+ print("skipped:", e)
 # don't walk into it
 dirs.remove(dir)
 continue
@@ -285,7 +285,7 @@ def migrate_wc_subdirs(wc_root_path):
 # Make a note of any problems in deleting.
 failed_delete_subdirs = []
 for wc_subdir_path in migrated_subdirs:
- print "deleting " + dotsvn_path(wc_subdir_path)
+ print("deleting " + dotsvn_path(wc_subdir_path))
 try:
 os.remove(db_path(wc_subdir_path))
 if os.path.exists(pristine_path(wc_subdir_path)):
@@ -298,9 +298,9 @@ def migrate_wc_subdirs(wc_root_path):
 # Notify any problems in deleting
 if failed_delete_subdirs:
- print "Failed to delete the following directories. Please delete them manually."
+ print("Failed to delete the following directories. Please delete them manually.")
 for wc_subdir_path in failed_delete_subdirs:
- print " " + dotsvn_path(wc_subdir_path)
+ print(" " + dotsvn_path(wc_subdir_path))
 os.chdir(old_cwd)
@@ -347,10 +347,10 @@ def bump_wc_format_number(wc_path):
 try:
 check_wc_format_number(wc_root_path)
 except (WrongFormatException, NotASubversionWC) as e:
- print "error:", e
+ print("error:", e)
 sys.exit(1)
- print "merging subdir DBs into single DB '" + wc_root_path + "'"
+ print("merging subdir DBs into single DB '" + wc_root_path + "'")
 move_and_shard_pristine_files(wc_root_path, wc_root_path)
 migrate_wc_subdirs(wc_root_path)
 bump_wc_format_number(wc_root_path)
diff --git a/tools/dist/collect_sigs.py b/tools/dist/collect_sigs.py
index cdb22bf8524f7..c3ddb79740153 100755
--- a/tools/dist/collect_sigs.py
+++ b/tools/dist/collect_sigs.py
@@ -42,7 +42,7 @@ def make_config():
 'Output a blank config file'
 if os.path.exists('config.py'):
- print "'config.py' already exists!'"
+ print("'config.py' already exists!'")
 sys.exit(1)
 conf = open('config.py', 'w')
@@ -51,7 +51,7 @@ def make_config():
 conf.write("filesdir = ''\n")
 conf.close()
- print "'config.py' generated"
+ print("'config.py' generated")
 def make_db():
 'Initialize a blank database'
@@ -114,9 +114,9 @@ def _open(filename):
 sys.path.append(os.path.dirname(sys.argv[0]))
 import config
 except:
- print 'Content-type: text/plain'
+ print('Content-type: text/plain')
 print
- print 'Cannot find config file'
+ print('Cannot find config file')
 sys.exit(1)
 r = re.compile('^\[GNUPG\:\] GOODSIG (\w*) (.*)')
@@ -310,8 +310,8 @@ def cat_signatures(basename):
 return (open(ascfile, 'r').read())
 def print_content_type(mimetype):
- print "Content-Type: " + mimetype
- print
+ print("Content-Type: " + mimetype)
+ print()
 def main():
 form = cgi.FieldStorage()
diff --git a/tools/examples/get-location-segments.py b/tools/examples/get-location-segments.py
index 7a73d39d8cab8..705ffc5d6ee16 100755
--- a/tools/examples/get-location-segments.py
+++ b/tools/examples/get-location-segments.py
@@ -73,14 +73,14 @@ def parse_args(args):
 def prompt_func_ssl_unknown_cert(realm, failures, cert_info, may_save, pool):
- print "The certficate details are as follows:"
- print "--------------------------------------"
- print "Issuer : " + str(cert_info.issuer_dname)
- print "Hostname : " + str(cert_info.hostname)
- print "ValidFrom : " + str(cert_info.valid_from)
- print "ValidUpto : " + str(cert_info.valid_until)
- print "Fingerprint: " + str(cert_info.fingerprint)
- print ""
+ print( "The certficate details are as follows:")
+ print("--------------------------------------")
+ print("Issuer : " + str(cert_info.issuer_dname))
+ print("Hostname : " + str(cert_info.hostname))
+ print("ValidFrom : " + str(cert_info.valid_from))
+ print("ValidUpto : " + str(cert_info.valid_until))
+ print("Fingerprint: " + str(cert_info.fingerprint))
+ print("")
 ssl_trust = core.svn_auth_cred_ssl_server_trust_t()
 if may_save:
 choice = raw_input( "accept (t)temporarily (p)permanently: ")
diff --git a/tools/server-side/svnpredumpfilter.py b/tools/server-side/svnpredumpfilter.py
index 58f159c3d3543..04190c1dea3bd 100755
--- a/tools/server-side/svnpredumpfilter.py
+++ b/tools/server-side/svnpredumpfilter.py
@@ -243,21 +243,21 @@ def svn_log_stream_get_dependencies(stream, included_paths):
 return dt
 def analyze_logs(included_paths):
- print "Initial include paths:"
+ print("Initial include paths:")
 for path in included_paths:
- print " + /%s" % (path)
+ print(" + /%s" % (path))
 dt = svn_log_stream_get_dependencies(sys.stdin, included_paths)
 if dt.dependent_paths:
 found_new_deps = True
- print "Dependent include paths found:"
+ print("Dependent include paths found:")
 for path in dt.dependent_paths:
- print " + /%s" % (path)
- print "You need to also include them (or one of their parents)."
+ print(" + /%s" % (path))
+ print("You need to also include them (or one of their parents).")
 else:
 found_new_deps = False
- print "No new dependencies found!"
+ print("No new dependencies found!")
 parents = {}
 for path in dt.include_paths:
 while 1:
@@ -268,11 +268,11 @@ def analyze_logs(included_paths):
 path = parent
 parents = parents.keys()
 if parents:
- print "You might still need to manually create parent directories " \
- "for the included paths before loading a filtered dump:"
+ print("You might still need to manually create parent directories " \
+ "for the included paths before loading a filtered dump:")
 parents.sort(compare_paths)
 for parent in parents:
- print " /%s" % (parent)
+ print(" /%s" % (parent))
 return found_new_deps and EXIT_MOREDEPS or EXIT_SUCCESS
diff --git a/tools/server-side/svnpubsub/irkerbridge.py b/tools/server-side/svnpubsub/irkerbridge.py
index 04b7ee25db51a..497e8f53c2daf 100755
--- a/tools/server-side/svnpubsub/irkerbridge.py
+++ b/tools/server-side/svnpubsub/irkerbridge.py
@@ -110,7 +110,7 @@ def setup(self):
 pass
 def run(self):
- print 'irkerbridge started, pid=%d' % (os.getpid())
+ print('irkerbridge started, pid=%d' % (os.getpid()))
 mc = svnpubsub.client.MultiClient(self.bdec.urls, self.bdec.commit,
@@ -197,7 +197,7 @@ def _send(self, irker, msg):
 json_msg = json.dumps(msg)
 sock.sendto(json_msg, (irker_list[0],int(irker_list[1])))
 if self.options.verbose:
- print "SENT: %s to %s" % (json_msg, irker)
+ print("SENT: %s to %s" % (json_msg, irker))
 def join_all(self):
 # Like self.commit(), but ignores self.config.get(section, "template").
@@ -212,7 +212,7 @@ def join_all(self):
 def commit(self, url, commit):
 if self.options.verbose:
- print "RECV: from %s" % url
+ print("RECV: from %s" % url)
 print json.dumps(vars(commit), indent=2)
 try:
@@ -233,14 +233,14 @@ def commit(self, url, commit):
 self._send(irker, msg)
 except:
- print "Unexpected error:"
+ print("Unexpected error:")
 traceback.print_exc()
 sys.stdout.flush()
 raise
 def event(self, url, event_name, event_arg):
 if self.options.verbose or event_name != "ping":
- print 'EVENT: %s from %s' % (event_name, url)
+ print('EVENT: %s from %s' % (event_name, url))
 sys.stdout.flush()
@@ -258,7 +258,7 @@ def hangup(self, signalnum, frame):
 self.reload()
 def reload(self):
- print "RELOAD: config file: %s" % self.fname
+ print("RELOAD: config file: %s" % self.fname)
 sys.stdout.flush()
 # Delete everything. Just re-reading would overlay, and would not
diff --git a/tools/server-side/svnpubsub/svntweet.py b/tools/server-side/svnpubsub/svntweet.py
index ed426bd21a165..8a5c73a24bd30 100755
--- a/tools/server-side/svnpubsub/svntweet.py
+++ b/tools/server-side/svnpubsub/svntweet.py
@@ -50,7 +50,7 @@
 try:
 import twitter
 except:
- print "Get a copy of twitty-twister from "
+ print("Get a copy of twitty-twister from ")
 sys.exit(-1)
 class Config(object):
 def __init__(self, path):
@@ -231,7 +231,7 @@ def main(config_file):
 if __name__ == "__main__":
 if len(sys.argv) != 2:
- print "invalid args, read source code"
+ print("invalid args, read source code")
 sys.exit(0)
 log.startLogging(sys.stdout)
 main(sys.argv[1])
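
Note on the conversion pattern (illustrative, not part of the patch): the hunks above only wrap existing print statements in parentheses, and a few surrounding context lines still use Python 2-only forms -- for example the trailing-comma prints `print "Fetch HEAD revision...",` in incremental-update.py and `print " %-*s :" % (indent, f),` in svnmerge.py, the bare `print` statements used for blank lines, and plain statements such as `print stdout.strip()` in hook_toolbox.py. The short sketch below shows how those remaining forms map onto the print() function; it assumes `from __future__ import print_function` so the same spelling also runs under Python 2.

# Illustrative sketch only -- not part of the diff above.
from __future__ import print_function  # makes print() available on Python 2 as well

# Python 2: print "Fetch HEAD revision...",   (trailing comma suppresses the
# newline and emits a soft space before the next item written)
print("Fetch HEAD revision...", end=' ')

# Python 2: bare `print` emits just a newline
print()

# Python 2: print stdout.strip()   (plain expression argument)
stdout = "example output\n"
print(stdout.strip())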