diff options
author | M S Vishwanath Bhat <vishwanath@gluster.com> | 2012-02-24 13:18:56 +0530 |
---|---|---|
committer | Vijay Bellur <vijay@gluster.com> | 2012-03-07 23:18:29 -0800 |
commit | 5fdd65f5f4f5df1d28b0fb4f7efed226d5db1b3c (patch) | |
tree | 377a94774c5cd9f55b16ba6fcd1c7b5ec51bfa3b /hdfs/0.20.2/tools/build-deploy-jar.py | |
parent | e1ab347720f25ed2e7db633a7202f7b873f4b90a (diff) |
renaming hdfs -> glusterfs-hadoop
Change-Id: Ibb937af1231f6bbed9a2d4eaeabc6e9d4000887f
BUG: 797064
Signed-off-by: M S Vishwanath Bhat <vishwanath@gluster.com>
Reviewed-on: http://review.gluster.com/2811
Tested-by: Gluster Build System <jenkins@build.gluster.com>
Reviewed-by: Vijay Bellur <vijay@gluster.com>
Diffstat (limited to 'hdfs/0.20.2/tools/build-deploy-jar.py')
-rw-r--r-- | hdfs/0.20.2/tools/build-deploy-jar.py | 212 |
1 file changed, 0 insertions, 212 deletions
diff --git a/hdfs/0.20.2/tools/build-deploy-jar.py b/hdfs/0.20.2/tools/build-deploy-jar.py deleted file mode 100644 index 450e08fb0c6..00000000000 --- a/hdfs/0.20.2/tools/build-deploy-jar.py +++ /dev/null @@ -1,212 +0,0 @@ -#!/usr/bin/python - -## - # - # Copyright (c) 2011 Gluster, Inc. <http://www.gluster.com> - # This file is part of GlusterFS. - # - # Licensed under the Apache License, Version 2.0 - # (the "License"); you may not use this file except in compliance with - # the License. You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - # implied. See the License for the specific language governing - # permissions and limitations under the License. - # - ## - -import getopt -import glob -import sys, os -import shutil -import subprocess, shlex - -def usage(): - print "usage: python build-deploy-jar.py [-b/--build] -d/--dir <hadoop-home> [-c/--core] [-m/--mapred] [-h/--henv]" - -def addSlash(s): - if not (s[-1] == '/'): - s = s + '/' - - return s - -def whereis(program): - abspath = None - for path in (os.environ.get('PATH', '')).split(':'): - abspath = os.path.join(path, program) - if os.path.exists(abspath) and not os.path.isdir(abspath): - return abspath - - return None - -def getLatestJar(targetdir): - latestJar = None - glusterfsJar = glob.glob(targetdir + "*.jar") - if len(glusterfsJar) == 0: - print "No GlusterFS jar file found in %s ... exiting" % (targetdir) - return None - - # pick up the latest jar file - just in case ... 
- stat = latestJar = None - ctime = 0 - - for jar in glusterfsJar: - stat = os.stat(jar) - if stat.st_ctime > ctime: - latestJar = jar - ctime = stat.st_ctime - - return latestJar - -# build the glusterfs hadoop plugin using maven -def build_jar(): - location = whereis('mvn') - - if location == None: - print "Cannot find maven to build glusterfs hadoop jar" - print "please install maven or if it's already installed then fix your PATH environ" - return None - - # do a clean packaging - targetdir = "./target/" - if os.path.exists(targetdir) and os.path.isdir(targetdir): - print "Cleaning up directories ... [ " + targetdir + " ]" - shutil.rmtree(targetdir) - - print "Building glusterfs jar ..." - process = subprocess.Popen(['package'], shell=True, - executable=location, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - - try: - (pout, perr) = process.communicate() - except: - process.wait() - if not process.returncode == 0: - print "Building glusterfs jar failed" - return None - - latestJar = getLatestJar(targetdir) - return latestJar - -def rcopy(f, host, libdir): - print " * doing remote copy to host %s" % (host) - scpCmd = "scp %s %s:%s" % (f, host, libdir) - - os.system(scpCmd); - -def deployInSlave(f, confdir, libdir, cc, cm, he): - slavefile = confdir + "slaves" - - ccFile = confdir + "core-site.xml" - cmFile = confdir + "mapred-site.xml" - heFile = confdir + "hadoop-env.sh" - - sf = open(slavefile, 'r') - for host in sf: - host = host.rstrip('\n') - print " >>> Deploying %s on %s ..." % (os.path.basename(f), host) - rcopy(f, host, libdir) - - if cc: - print " >>> Deploying [%s] on %s ..." % (os.path.basename(ccFile), host) - rcopy(ccFile, host, confdir) - - if cm: - print " >>> Deploying [%s] on %s ..." % (os.path.basename(cmFile), host) - rcopy(cmFile, host, confdir) - - if he: - print " >>> Deploying [%s] on %s ..." 
% (os.path.basename(heFile), host) - rcopy(heFile, host, confdir); - - print "<<< Done\n" - - sf.close() - -def deployInMaster(f, confdir, libdir): - import socket - masterfile = confdir + "masters" - - mf = open(masterfile, 'r') - for host in mf: - host = host.rstrip('\n') - print " >>> Deploying %s on %s ..." % (os.path.basename(f), host) - h = host - try: - socket.inet_aton(host) - h = socket.getfqdn(host) - except socket.error: - # host is not a ip adddress - pass - - if h == socket.gethostname() or h == 'localhost': - # local cp - print " * doing local copy" - shutil.copy(f, libdir) - else: - # scp the file - rcopy(f, h, libdir) - - print "<<< Done\n" - - mf.close() - -if __name__ == '__main__': - opt = args = [] - try: - opt, args = getopt.getopt(sys.argv[1:], "bd:cmh", ["build", "dir=", "core", "mapred", "henv"]); - except getopt.GetoptError, err: - print str(err) - usage() - sys.exit(1) - - needbuild = hadoop_dir = copyCore = copyMapred = copyHadoopEnv = None - - for k, v in opt: - if k in ("-b", "--build"): - needbuild = True - elif k in ("-d", "--dir"): - hadoop_dir = v - elif k in ("-c", "--core"): - copyCore = True - elif k in ("-m", "--mapred"): - copyMapred = True - elif k in ("-h", "--henv"): - copyHadoopEnv = True - else: - assert False, "unhandled option" - - assert not hadoop_dir == None, "hadoop directory missing" - - if needbuild: - jar = build_jar() - if jar == None: - sys.exit(1) - else: - jar = getLatestJar('./target/') - if jar == None: - print "Maybe you want to build it ? 
-b option" - sys.exit(1) - - print "" - print "*** Deploying %s *** " % (jar) - - # copy jar to local hadoop distribution (master) - hadoop_home = addSlash(hadoop_dir) - if not (os.path.exists(hadoop_home) and os.path.isdir(hadoop_home)): - print "path " + hadoop_home + " does not exist or is not adiretory"; - sys.exit(1); - - hadoop_conf = hadoop_home + "conf/" - hadoop_lib = hadoop_home + "lib/" - - print " >>> Scanning hadoop master file for host(s) to deploy" - deployInMaster(jar, hadoop_conf, hadoop_lib) - - print "" - print " >>> Scanning hadoop slave file for host(s) to deploy" - deployInSlave(jar, hadoop_conf, hadoop_lib, copyCore, copyMapred, copyHadoopEnv) |