author    | Kaleb S. KEITHLEY <kkeithle@redhat.com> | 2018-04-13 09:13:16 -0400
committer | Kaleb KEITHLEY <kkeithle@redhat.com>    | 2018-05-02 11:28:46 +0000
commit    | 4e7b423d3c3137c3f83b71b36279e1a544154833 (patch)
tree      | e59dc57aa319a5e145161a0e32fba9fc74773e91 /extras
parent    | 9da91172538a2a95fba609c93e199db159fd1938 (diff)
core/various: python3 compat, prepare for python2 -> python3
see https://review.gluster.org/#/c/19788/
use print fn from __future__
Change-Id: If5075d8d9ca9641058fbc71df8a52aa35804cda4
updates: #411
Signed-off-by: Kaleb S. KEITHLEY <kkeithle@redhat.com>
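The change applied across these scripts is the standard print-statement to print-function conversion; importing print_function from __future__ lets the same source run under both Python 2 and Python 3. A minimal illustrative sketch (not taken from the patch; the output file name is made up):

from __future__ import print_function   # must precede all other imports

import sys

# Python 2 only:   print "hello"
# Python 2 only:   print >> sys.stderr, "hello"
# Python 2 and 3, once print_function is imported:
print("hello")
print("hello", file=sys.stderr)

# Redirection to any open file object works the same way.
with open("example-output.txt", "w") as fh:
    print("one line per call", file=fh)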
Diffstat (limited to 'extras')
-rwxr-xr-x | extras/create_new_xlator/generate_xlator.py | 58
-rwxr-xr-x | extras/failed-tests.py                       | 25
-rwxr-xr-x | extras/git-branch-diff.py                    | 47
-rw-r--r-- | extras/gnfs-loganalyse.py                    |  5
-rwxr-xr-x | extras/prot_filter.py                        | 21
-rwxr-xr-x | extras/quota/xattr_analysis.py               |  9
-rwxr-xr-x | extras/rebalance.py                          | 37
-rw-r--r-- | extras/volfilter.py                          | 15
8 files changed, 113 insertions, 104 deletions
diff --git a/extras/create_new_xlator/generate_xlator.py b/extras/create_new_xlator/generate_xlator.py
index c51c3a41a22..281afc2e0df 100755
--- a/extras/create_new_xlator/generate_xlator.py
+++ b/extras/create_new_xlator/generate_xlator.py
@@ -1,4 +1,6 @@
 #!/usr/bin/python2
+
+from __future__ import print_function
 import os
 import re
 import sys
@@ -74,42 +76,42 @@ def generate(tmpl, name, table):

 def gen_xlator():
     xl = open(src_dir_path+"/"+xl_name+".c", 'w+')
-    print >> xl, COPYRIGHT
-    print >> xl, fragments["INCLUDE_IN_SRC_FILE"].replace("@XL_NAME@",
-                 xl_name)
+    print(COPYRIGHT, file=xl)
+    print(fragments["INCLUDE_IN_SRC_FILE"].replace("@XL_NAME@",
+          xl_name), file=xl)

     #Generate cbks and fops
     for fop in ops:
-        print >> xl, generate(fragments["CBK_TEMPLATE"], fop, ops)
-        print >> xl, generate(fragments["FOP_TEMPLATE"], fop, ops)
+        print(generate(fragments["CBK_TEMPLATE"], fop, ops), file=xl)
+        print(generate(fragments["FOP_TEMPLATE"], fop, ops), file=xl)

     for cbk in xlator_cbks:
-        print >> xl, generate(fragments["FUNC_TEMPLATE"], cbk,
-                     xlator_cbks)
+        print(generate(fragments["FUNC_TEMPLATE"], cbk,
+              xlator_cbks), file=xl)

     for dops in xlator_dumpops:
-        print >> xl, generate(fragments["FUNC_TEMPLATE"], dops,
-                     xlator_dumpops)
+        print(generate(fragments["FUNC_TEMPLATE"], dops,
+              xlator_dumpops), file=xl)

-    print >> xl, fragments["XLATOR_METHODS"]
+    print(fragments["XLATOR_METHODS"], file=xl)

     #Generate fop table
-    print >> xl, "struct xlator_fops fops = {"
+    print("struct xlator_fops fops = {", file=xl)
     for fop in ops:
-        print >> xl, " .{0:20} = {1}_{2},".format(fop, fop_prefix, fop)
-    print >> xl, "};"
+        print(" .{0:20} = {1}_{2},".format(fop, fop_prefix, fop), file=xl)
+    print("};", file=xl)

     #Generate xlator_cbks table
-    print >> xl, "struct xlator_cbks cbks = {"
+    print("struct xlator_cbks cbks = {", file=xl)
     for cbk in xlator_cbks:
-        print >> xl, " .{0:20} = {1}_{2},".format(cbk, fop_prefix, cbk)
-    print >> xl, "};"
+        print(" .{0:20} = {1}_{2},".format(cbk, fop_prefix, cbk), file=xl)
+    print("};", file=xl)

     #Generate xlator_dumpops table
-    print >> xl, "struct xlator_dumpops dumpops = {"
+    print("struct xlator_dumpops dumpops = {", file=xl)
     for dops in xlator_dumpops:
-        print >> xl, " .{0:20} = {1}_{2},".format(dops, fop_prefix, dops)
-    print >> xl, "};"
+        print(" .{0:20} = {1}_{2},".format(dops, fop_prefix, dops), file=xl)
+    print("};", file=xl)

     xl.close()
@@ -122,38 +124,38 @@ def create_dir_struct():

 def gen_header_files():
     upname = xl_name_no_hyphen.upper()
     h = open(src_dir_path+"/"+xl_name+".h", 'w+')
-    print >> h, COPYRIGHT
+    print(COPYRIGHT, file=h)
     txt = fragments["HEADER_FMT"].replace("@HFL_NAME@", upname)
     txt2 = fragments["INCLUDE_IN_HEADER_FILE"].replace("@XL_NAME@", xl_name)
     txt = txt.replace("@INCLUDE_SECT@",txt2)
-    print >> h, txt
+    print(txt, file=h)
     h.close()

     h = open(src_dir_path+"/"+xl_name+"-mem-types.h", 'w+')
-    print >> h, COPYRIGHT
+    print(COPYRIGHT, file=h)
    txt = fragments["HEADER_FMT"].replace("@HFL_NAME@", upname+"_MEM_TYPES")
     txt = txt.replace("@INCLUDE_SECT@", '#include "mem-types.h"')
-    print >> h, txt
+    print(txt, file=h)
     h.close()

     h = open(src_dir_path+"/"+xl_name+"-messages.h", 'w+')
-    print >> h, COPYRIGHT
+    print(COPYRIGHT, file=h)
     txt = fragments["HEADER_FMT"].replace("@HFL_NAME@", upname+"_MESSAGES")
     txt = txt.replace("@INCLUDE_SECT@", '')
-    print >> h, txt
+    print(txt, file=h)
     h.close()

 def gen_makefiles():
     m = open(dir_path+"/Makefile.am", 'w+')
-    print >> m, "SUBDIRS = src\n\nCLEANFILES ="
+    print("SUBDIRS = src\n\nCLEANFILES =", file=m)
     m.close()

     m = open(src_dir_path+"/Makefile.am", 'w+')
     txt = MAKEFILE_FMT.replace("@XL_NAME@", xl_name)
     txt = txt.replace("@XL_NAME_NO_HYPHEN@", xl_name_no_hyphen)
     txt = txt.replace("@XL_TYPE@",xlator_type)
-    print >> m, txt
+    print(txt, file=m)
     m.close()

 def get_copyright ():
@@ -183,7 +185,7 @@ def load_fragments ():

 if __name__ == '__main__':
     if len(sys.argv) < 3:
-        print "USAGE: ./gen_xlator <XLATOR_DIR> <XLATOR_NAME> <FOP_PREFIX>"
+        print("USAGE: ./gen_xlator <XLATOR_DIR> <XLATOR_NAME> <FOP_PREFIX>")
         sys.exit(0)

     xl_name = sys.argv[2]
diff --git a/extras/failed-tests.py b/extras/failed-tests.py
index 8391d229b45..1ef1a954f4f 100755
--- a/extras/failed-tests.py
+++ b/extras/failed-tests.py
@@ -1,5 +1,6 @@
 #!/usr/bin/python2

+from __future__ import print_function
 import blessings
 import requests
 from requests.packages.urllib3.exceptions import InsecureRequestWarning
@@ -25,7 +26,7 @@ def process_failure(url, node):
         if t.find("Result: FAIL") != -1:
             for t2 in accum:
                 if VERBOSE:
-                    print t2.encode('utf-8')
+                    print(t2.encode('utf-8'))
                 if t2.find("Wstat") != -1:
                     test_case = re.search('\./tests/.*\.t', t2)
                     if test_case:
@@ -69,26 +70,26 @@ def print_summary(failed_builds, total_builds, html=False):
     template = 0
     if html:
         template = 1
-    print render(
+    print(render(
         count[template],
         {'failed': failed_builds, 'total': total_builds}
-    )
+    ))
     for k, v in summary.iteritems():
         if k == 'core':
-            print ''.join([TERM.red, "Found cores:", TERM.normal])
+            print(''.join([TERM.red, "Found cores:", TERM.normal]))
             for comp, link in zip(v[::2], v[1::2]):
-                print render(component[template], {'comp': comp})
-                print render(
+                print(render(component[template], {'comp': comp}))
+                print(render(
                     regression_link[template],
                     {'link': link[0], 'node': link[1]}
-                )
+                ))
         else:
-            print render(failure_count[template], {'test': k, 'count': len(v)})
+            print(render(failure_count[template], {'test': k, 'count': len(v)}))
             for link in v:
-                print render(
+                print(render(
                     regression_link[template],
                     {'link': link[0], 'node': link[1]}
-                )
+                ))
@@ -114,11 +115,11 @@ def get_summary(cut_off_date, reg_link):
             success_count += 1
             continue
         if VERBOSE:
-            print ''.join([
+            print(''.join([
                 TERM.red,
                 'FAILURE on {0}'.format(build['url']),
                 TERM.normal
-            ])
+            ]))
         url = ''.join([build['url'], 'consoleText'])
         failure_count += 1
         process_failure(url, build['builtOn'])
diff --git a/extras/git-branch-diff.py b/extras/git-branch-diff.py
index c9e9dd0da06..99cc707b837 100755
--- a/extras/git-branch-diff.py
+++ b/extras/git-branch-diff.py
@@ -75,6 +75,7 @@ Prasanna Kumar Kalever <prasanna.kalever@redhat.com>
 """

+from __future__ import print_function
 import os
 import sys
 import argparse
@@ -118,11 +119,11 @@ class GitBranchDiff:
         status_tbr, op = commands.getstatusoutput('git log ' + self.t_pattern)

         if status_sbr != 0:
-            print "Error: --source=" + self.s_pattern + " doesn't exit\n"
+            print("Error: --source=" + self.s_pattern + " doesn't exit\n")
             self.parser.print_help()
             exit(status_sbr)
         elif status_tbr != 0:
-            print "Error: --target=" + self.t_pattern + " doesn't exit\n"
+            print("Error: --target=" + self.t_pattern + " doesn't exit\n")
             self.parser.print_help()
             exit(status_tbr)
@@ -137,8 +138,8 @@ class GitBranchDiff:
             cmd4 = 'git log ' + self.s_pattern + ' --author=' + ide
             c_list = subprocess.check_output(cmd4, shell = True)
             if len(c_list) is 0:
-                print "Error: --author=%s doesn't exit" %self.g_author
-                print "see '%s --help'" %__file__
+                print("Error: --author=%s doesn't exit" %self.g_author)
+                print("see '%s --help'" %__file__)
                 exit(1)
         if len(ide_list) > 1:
             self.g_author = "\|".join(ide_list)
@@ -150,16 +151,16 @@ class GitBranchDiff:
             return True
         except requests.Timeout as err:
             " request timed out"
-            print "Warning: failed to get list of open review commits on " \
+            print("Warning: failed to get list of open review commits on " \
                   "gerrit.\n" \
                   "hint: Request timed out! gerrit server could possibly " \
-                  "slow ...\n"
+                  "slow ...\n")
             return False
         except requests.RequestException as err:
             " handle other errors"
-            print "Warning: failed to get list of open review commits on " \
+            print("Warning: failed to get list of open review commits on " \
                   "gerrit\n" \
-                  "hint: check with internet connection ...\n"
+                  "hint: check with internet connection ...\n")
             return False

     def parse_cmd_args (self):
@@ -212,18 +213,18 @@ class GitBranchDiff:
     def print_output (self):
         " display the result list"
-        print "\n------------------------------------------------------------\n"
-        print self.tick + " Successfully Backported changes:"
-        print ' {' + 'from: ' + self.s_pattern + \
-              ' to: '+ self.t_pattern + '}\n'
+        print("\n------------------------------------------------------------\n")
+        print(self.tick + " Successfully Backported changes:")
+        print(' {' + 'from: ' + self.s_pattern + \
+              ' to: '+ self.t_pattern + '}\n')
         for key, value in self.s_dict.iteritems():
             if value in self.t_dict.itervalues():
-                print "[%s%s%s] %s" %(self.yello_set,
+                print("[%s%s%s] %s" %(self.yello_set,
                                       key,
                                       self.color_unset,
-                                      value)
-        print "\n------------------------------------------------------------\n"
-        print self.cross + " Missing patches in " + self.t_pattern + ':\n'
+                                      value))
+        print("\n------------------------------------------------------------\n")
+        print(self.cross + " Missing patches in " + self.t_pattern + ':\n')
         if self.connected_to_gerrit():
             cmd3 = "git review -r origin -l"
             review_list = subprocess.check_output(cmd3, shell = True).split('\n')
@@ -233,18 +234,18 @@ class GitBranchDiff:
         for key, value in self.s_dict.iteritems():
             if value not in self.t_dict.itervalues():
                 if any(value in s for s in review_list):
-                    print "[%s%s%s] %s %s(under review)%s" %(self.yello_set,
+                    print("[%s%s%s] %s %s(under review)%s" %(self.yello_set,
                                                              key,
                                                              self.color_unset,
                                                              value,
                                                              self.green_set,
-                                                             self.color_unset)
+                                                             self.color_unset))
                 else:
-                    print "[%s%s%s] %s" %(self.yello_set,
+                    print("[%s%s%s] %s" %(self.yello_set,
                                           key,
                                           self.color_unset,
-                                          value)
-        print "\n------------------------------------------------------------\n"
+                                          value))
+        print("\n------------------------------------------------------------\n")

     def main (self):
         self.check_pattern_exist()
@@ -262,8 +263,8 @@ class GitBranchDiff:
         t_list = subprocess.check_output(cmd2, shell = True)

         if len(t_list) is 0:
-            print "No commits in the target: %s" %self.t_pattern
-            print "see '%s --help'" %__file__
+            print("No commits in the target: %s" %self.t_pattern)
+            print("see '%s --help'" %__file__)
             exit()
         else:
             t_list = t_list.split('\n')
diff --git a/extras/gnfs-loganalyse.py b/extras/gnfs-loganalyse.py
index 71e79b6be4e..6341d007188 100644
--- a/extras/gnfs-loganalyse.py
+++ b/extras/gnfs-loganalyse.py
@@ -10,6 +10,7 @@
 """

+from __future__ import print_function
 import os
 import string
 import sys
@@ -72,7 +73,7 @@ class NFSRequest:
         self.replygfid = tokens [gfididx + 1].strip(",")

     def dump (self):
-        print "ReqLine: " + str(self.calllinecount) + " TimeStamp: " + self.timestamp + ", XID: " + self.xid + " " + self.op + " ARGS: " + self.opdata + " RepLine: " + str(self.replylinecount) + " " + self.replydata
+        print("ReqLine: " + str(self.calllinecount) + " TimeStamp: " + self.timestamp + ", XID: " + self.xid + " " + self.op + " ARGS: " + self.opdata + " RepLine: " + str(self.replylinecount) + " " + self.replydata)

 class NFSLogAnalyzer:
@@ -149,7 +150,7 @@ class NFSLogAnalyzer:
             return
         rcount = len (self.xid_request_map.keys ())
         orphancount = len (self.orphan_replies.keys ())
-        print "Requests: " + str(rcount) + ", Orphans: " + str(orphancount)
+        print("Requests: " + str(rcount) + ", Orphans: " + str(orphancount))

     def dump (self):
         self.getStats ()
diff --git a/extras/prot_filter.py b/extras/prot_filter.py
index 290792697a2..0c48fd5b8e1 100755
--- a/extras/prot_filter.py
+++ b/extras/prot_filter.py
@@ -21,13 +21,14 @@ deliberate choice so that it will catch deletions from those sources as well.
 """

-volume_list = [ "jdtest" ]
-
+from __future__ import print_function
 import copy
 import string
 import sys
 import types

+volume_list = [ "jdtest" ]
+
 class Translator:
     def __init__ (self, name):
         self.name = name
@@ -86,16 +87,16 @@ def generate (graph, last, stream=sys.stdout):
     for sv in last.subvols:
         if not sv.dumped:
             generate(graph,sv,stream)
-            print >> stream, ""
+            print("", file=stream)
             sv.dumped = True
-    print >> stream, "volume %s" % last.name
-    print >> stream, " type %s" % last.xl_type
+    print("volume %s" % last.name, file=stream)
+    print(" type %s" % last.xl_type, file=stream)
     for k, v in last.opts.iteritems():
-        print >> stream, " option %s %s" % (k, v)
+        print(" option %s %s" % (k, v), file=stream)
     if last.subvols:
-        print >> stream, " subvolumes %s" % string.join(
-            [ sv.name for sv in last.subvols ])
-    print >> stream, "end-volume"
+        print(" subvolumes %s" % string.join(
+            [ sv.name for sv in last.subvols ]), file=stream)
+    print("end-volume", file=stream)

 def push_filter (graph, old_xl, filt_type, opts={}):
     new_type = "-" + filt_type.split("/")[1]
@@ -128,7 +129,7 @@ if __name__ == "__main__":
         if graph.has_key(v):
             break
     else:
-        print "No configured volumes found - aborting."
+        print("No configured volumes found - aborting.")
         sys.exit(0)
     for v in graph.values():
         if v.xl_type == "cluster/distribute":
diff --git a/extras/quota/xattr_analysis.py b/extras/quota/xattr_analysis.py
index 512fcd39b88..9a178e058c2 100755
--- a/extras/quota/xattr_analysis.py
+++ b/extras/quota/xattr_analysis.py
@@ -7,6 +7,7 @@
 # The script takes only one input LOG _FILE generated from the command,
 # find <brick_path> | xargs getfattr -d -m. -e hex > log_gluster_xattr

+from __future__ import print_function
 import re
 import subprocess
 import sys
@@ -28,14 +29,14 @@ def get_quota_xattr_brick():
         for xattr in pairs:
             k = xattr.split("=")[0]
             if re.search("# file:",k):
-                print xdict
+                print(xdict)
                 filename=k
-                print "=====" + filename + "======="
+                print("=====" + filename + "=======")
                 xdict = {}
             elif k is "":
                 pass
             else:
-                print xattr
+                print(xattr)
                 v = xattr.split("=")[1]
                 if re.search("contri",k):
                     if len(v) == 34:
@@ -64,7 +65,7 @@ def get_quota_xattr_brick():
                 mismatch_size.append((xdict['contri_size'], xdict['size'], filename))

     for values in mismatch_size:
-        print values
+        print(values)

 if __name__ == '__main__':
diff --git a/extras/rebalance.py b/extras/rebalance.py
index b2ec6a52290..69ce282b39e 100755
--- a/extras/rebalance.py
+++ b/extras/rebalance.py
@@ -1,5 +1,6 @@
 #!/usr/bin/python2

+from __future__ import print_function
 import atexit
 import copy
 import optparse
@@ -86,7 +87,7 @@ def get_range (brick):
     try:
         value = f.readline().rstrip().split('=')[1][2:]
     except:
-        print "could not get layout for %s (might be OK)" % brick
+        print("could not get layout for %s (might be OK)" % brick)
         return None
     v_start = int("0x"+value[16:24],16)
     v_end = int("0x"+value[24:32],16)
@@ -126,7 +127,7 @@ def normalize (in_bricks):
                 curr_hash = b.r_end + 1
                 break
         else:
-            print "gap found at 0x%08x" % curr_hash
+            print("gap found at 0x%08x" % curr_hash)
             sys.exit(1)
     return out_bricks + in_bricks, used
@@ -183,7 +184,7 @@ if __name__ == "__main__":
     def cleanup_workdir ():
         os.chdir(orig_dir)
         if options.verbose:
-            print "Cleaning up %s" % work_dir
+            print("Cleaning up %s" % work_dir)
         for b in bricks:
             subprocess.call(["umount",b.path])
         shutil.rmtree(work_dir)
@@ -193,7 +194,7 @@ if __name__ == "__main__":

     # Mount each brick individually, so we can issue brick-specific calls.
     if options.verbose:
-        print "Mounting subvolumes..."
+        print("Mounting subvolumes...")
     index = 0
     volfile_pipe = get_bricks(hostname,volname)
     all_xlators, last_xlator = volfilter.load(volfile_pipe)
@@ -201,7 +202,7 @@ if __name__ == "__main__":
         if dht_vol.type == "cluster/distribute":
             break
     else:
-        print "no DHT volume found"
+        print("no DHT volume found")
         sys.exit(1)
     for sv in dht_vol.subvols:
         #print "found subvol %s" % sv.name
@@ -210,12 +211,12 @@ if __name__ == "__main__":
         mount_brick(lpath,all_xlators,sv)
         bricks.append(Brick(lpath,sv.name))
     if index == 0:
-        print "no bricks"
+        print("no bricks")
         sys.exit(1)

     # Collect all of the sizes.
     if options.verbose:
-        print "Collecting information..."
+        print("Collecting information...")
     total = 0
     for b in bricks:
         info = os.statvfs(b.path)
@@ -237,7 +238,7 @@ if __name__ == "__main__":
         else:
             size = info[2] / blocksper100mb
         if size <= 0:
-            print "brick %s has invalid size %d" % (b.path, size)
+            print("brick %s has invalid size %d" % (b.path, size))
             sys.exit(1)
         b.set_size(size)
         total += size
@@ -248,12 +249,12 @@ if __name__ == "__main__":
         if hash_range is not None:
             rs, re = hash_range
             if rs > re:
-                print "%s has backwards hash range" % b.path
+                print("%s has backwards hash range" % b.path)
                 sys.exit(1)
             b.set_range(hash_range[0],hash_range[1])

     if options.verbose:
-        print "Calculating new layouts..."
+        print("Calculating new layouts...")
     calc_sizes(bricks,total)
     bricks, used = normalize(bricks)
@@ -283,25 +284,25 @@ if __name__ == "__main__":
             curr_hash += b.good_size
             b.r_end = curr_hash - 1

-    print "Here are the xattr values for your size-weighted layout:"
+    print("Here are the xattr values for your size-weighted layout:")
     for b in bricks:
-        print " %s: 0x0000000200000000%08x%08x" % (
-            b.sv_name, b.r_start, b.r_end)
+        print(" %s: 0x0000000200000000%08x%08x" % (
+            b.sv_name, b.r_start, b.r_end))

     if fix_dir:
         if options.verbose:
-            print "Fixing layout for %s" % fix_dir
+            print("Fixing layout for %s" % fix_dir)
         for b in bricks:
             value = "0x0000000200000000%08x%08x" % (
                 b.r_start, b.r_end)
             path = "%s/%s" % (b.path, fix_dir)
             cmd = "setfattr -n trusted.glusterfs.dht -v %s %s" % (
                 value, path)
-            print cmd
+            print(cmd)

     if options.leave_mounted:
-        print "The following subvolumes are still mounted:"
+        print("The following subvolumes are still mounted:")
         for b in bricks:
-            print "%s on %s" % (b.sv_name, b.path)
-        print "Don't forget to clean up when you're done."
+            print("%s on %s" % (b.sv_name, b.path))
+        print("Don't forget to clean up when you're done.")
diff --git a/extras/volfilter.py b/extras/volfilter.py
index 0ca456a7882..d242e60dcba 100644
--- a/extras/volfilter.py
+++ b/extras/volfilter.py
@@ -13,6 +13,7 @@
 # You should have received a copy of the GNU General Public License * along
 # with HekaFS. If not, see <http://www.gnu.org/licenses/>.

+from __future__ import print_function
 import copy
 import string
 import sys
@@ -127,16 +128,16 @@ def generate (graph, last, stream=sys.stdout):
     for sv in last.subvols:
         if not sv.dumped:
             generate(graph,sv,stream)
-            print >> stream, ""
+            print("", file=stream)
             sv.dumped = True
-    print >> stream, "volume %s" % last.name
-    print >> stream, " type %s" % last.type
+    print("volume %s" % last.name, file=stream)
+    print(" type %s" % last.type, file=stream)
     for k, v in last.opts.iteritems():
-        print >> stream, " option %s %s" % (k, v)
+        print(" option %s %s" % (k, v), file=stream)
     if last.subvols:
-        print >> stream, " subvolumes %s" % string.join(
-            [ sv.name for sv in last.subvols ])
-    print >> stream, "end-volume"
+        print(" subvolumes %s" % string.join(
+            [ sv.name for sv in last.subvols ]), file=stream)
+    print("end-volume", file=stream)

 def push_filter (graph, old_xl, filt_type, opts={}):
     suffix = "-" + old_xl.type.split("/")[1]