From 3fa732363ac431cefe543f9096cbbcd3e6288157 Mon Sep 17 00:00:00 2001
From: David Sagan
Date: Mon, 25 Sep 2023 18:55:00 -0400
Subject: [PATCH] Removed unneeded.

---
 util/build_config.py  | 380 -------------------------------
 util/build_mgmt.py    | 167 --------------
 util/build_supervisor | 512 ------------------------------------------
 util/builder          | 252 ---------------------
 util/purge_releases   | 263 ----------------------
 util/rotate_release   | 269 ----------------------
 util/svn_upgrade      |  17 --
 util/tag_release      |  24 --
 8 files changed, 1884 deletions(-)
 delete mode 100644 util/build_config.py
 delete mode 100644 util/build_mgmt.py
 delete mode 100755 util/build_supervisor
 delete mode 100755 util/builder
 delete mode 100755 util/purge_releases
 delete mode 100755 util/rotate_release
 delete mode 100755 util/svn_upgrade
 delete mode 100755 util/tag_release

diff --git a/util/build_config.py b/util/build_config.py
deleted file mode 100644
index d4ed6034b3..0000000000
--- a/util/build_config.py
+++ /dev/null
@@ -1,380 +0,0 @@
-#-*-python-*-
-#
-# build_supervisor configuration file
-#-----------------------------------------------------
-
-intel_offline_release_build_request = [
-    'Linux_x86_64_intel-offline'
-    ]
-
-intel_online_release_build_request = [
-    'Linux_x86_64_intel-online'
-    ]
-
-intel_packages_build_request = [
-    'packages_intel'
-    ]
-
-intel_dist_build_request = [
-    'Linux_i686_intel'
-    ]
-
-intel_local_release_build_request = [
-    'Linux_x86_64_intel-local'
-    ]
-
-intel_local_packages_build_request = [
-    'packages_intel-local'
-    ]
-
-gfortran_offline_release_build_request = [
-    'Linux_x86_64_gfortran-offline'
-    ]
-
-gfortran_online_release_build_request = [
-    'Linux_x86_64_gfortran-online'
-    ]
-
-gfortran_packages_build_request = [
-    'packages_gfortran'
-    ]
-
-gfortran_dist_build_request = [
-    'Linux_i686_gfortran'
-    ]
-
-gfortran_local_release_build_request = [
-    'Linux_x86_64_gfortran-local'
-    ]
-
-gfortran_local_packages_build_request = [
-    'packages_gfortran-local'
-    ]
-
-
-#-----------------------------------------------------
-# Collect all build requests by type into a master
-# dictionary.
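# For example (illustrative only, derived from the assignments just below),
# the resulting mapping contains entries such as:
#   build_requests['release_intel']     -> ['Linux_x86_64_intel-offline']
#   build_requests['packages_gfortran'] -> ['packages_gfortran']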
-#----------------------------------------------------- -build_requests = {} -build_requests['release_intel'] = intel_offline_release_build_request -build_requests['online-release_intel'] = intel_online_release_build_request -build_requests['packages_intel'] = intel_packages_build_request -build_requests['dist_intel'] = intel_dist_build_request -build_requests['local-release_intel'] = intel_local_release_build_request -build_requests['local-packages_intel'] = intel_local_packages_build_request - -build_requests['release_gfortran'] = gfortran_offline_release_build_request -build_requests['online-release_gfortran'] = gfortran_online_release_build_request -build_requests['packages_gfortran'] = gfortran_packages_build_request -build_requests['dist_gfortran'] = gfortran_dist_build_request -build_requests['local-release_gfortran'] = gfortran_local_release_build_request -build_requests['local-packages_gfortran'] = gfortran_local_packages_build_request - -#----------------------------------------------------- -#----------------------------------------------------- -offline_base_dir = '/nfs/acc/libs' -offline_util_dir = offline_base_dir + '/util' -#offline_host = 'acc101.classe.cornell.edu' -offline_host = '$HOSTNAME' - -online_base_dir = '/nfs/cesr/online/lib' -online_util_dir = online_base_dir + '/util' -online_host = 'cesr109.classe.cornell.edu' - -local_base_dir = '/mnt/acc/libs' -local_util_dir = local_base_dir + '/util' -#local_host = 'lnx7179.classe.cornell.edu' -local_host = '$HOSTNAME' - -makefile_dir = '/usr/bin' - -release_mail_list = '$USER@cornell.edu,sbp8@cornell.edu' -local_mail_list = '$USER@cornell.edu' - -#----------------------------------------------------- -#----------------------------------------------------- - -release_build_list = [ - 'git/bmad-doc', #RT 60649 - '/trunk/util', - '/trunk/build_system', - '/trunk/include', - '/trunk/c_utils', - '/packages/forest', - '/trunk/src/sim_utils', - '/trunk/src/bmad', - '/trunk/src/cpp_bmad_interface', - '/CESR/CESR_libs/cesr_utils', - '/CESR/CESR_libs/irpdefs', - '/CESR/CESR_libs/genplt', - '/CESR/CESR_libs/mpmnet', - '/CESR/CESR_libs/timing', - '/CESR/CESR_instr/CesrBPM', - '/CESR/CESR_instr/instr_utils', - '/Comm/Comm_libs/cbi_net', - '/CESR/CESR_progs/cbpmfio', - '/CESR/CESR_instr/BeamInstSupport', - '/CESR/CESR_instr/CBPM-TSHARC', - '/Comm/Comm_libs/rfnet', - '/CESR/CESR_instr/nonlin_bpm', - '/CESR/CESR_libs/mpm_utils', - '/CESR/CESR_libs/rf', - '/CESR/CESR_progs/tune', - '/CESR/CESR_progs/gen_gui', - '/CESR/CESR_progs/diagnose', - '/CESR/CESR_services/automail', - '/CESR/CESR_services/averager', - '/CESR/CESR_services/condx', - '/CESR/CESR_services/displays', - '/CESR/CESR_services/dt80_logger', - '/CESR/CESR_services/err_mon', - '/CESR/CESR_services/event_wat', - '/CESR/CESR_services/fastlog', - '/CESR/CESR_services/gpib_serv', - '/CESR/CESR_services/htcmon', - '/CESR/CESR_services/intloc', - '/CESR/CESR_services/logit', - '/CESR/CESR_services/onoff', - '/CESR/CESR_services/per_mag', - '/CESR/CESR_services/rfintl', - '/CESR/CESR_services/sentry', - '/CESR/CESR_services/show', - '/CESR/CESR_services/synring', - '/CESR/CESR_services/vacmon', - '/CESR/CESR_services/xscope', - '/CESR/CESR_progs/magstat', - '/CESR/CESR_services/simcon', - '/trunk/src/tao', - '/trunk/src/lux', - '/trunk/src/bmadz', - '/trunk/src/bsim', - '/CESR/CESR_progs/synchv', - '/CESR/CESR_progs/cesrv', - '/trunk/src/regression_tests', - '/trunk/src/bsim_cesr', - '/CESR/CESR_progs/BPM_tbt_gain', - '/CESR/CESR_progs/cesr_programs', - '/trunk/src/util_programs', 
- '/CESR/CESR_services/CBIC', - '/trunk/src/code_examples', - '/trunk/src/analyzer', - '/CESR/CESR_progs/xbus_book', - '/CESR/CESR_progs/newin', - '/CESR/CESR_progs/DB_utils', - '/CESR/CESR_progs/chfeed', - '/CESR/CESR_progs/gdl', - '/CESR/CESR_progs/hard', - '/CESR/CESR_progs/lat_utils', - '/CESR/CESR_progs/magnet', - '/CESR/CESR_progs/save', - '/CESR/CESR_progs/vac', - '/CESR/CESR_progs/crf', - '/CESR/CESR_progs/srf', - '/CESR/CESR_progs/univ_tune_tracker', - '/CESR/CESR_services/console', - '/CESR/CESR_services/winj', - '/CESR/CESR_services/daily', - '/CESR/CESR_services/xetec', - '/CESR/CESR_services/webrep', - '/CESR/CESR_services/bcmserv', - '/CESR/CESR_services/moore232', - '/CESR/CESR_services/mooreenet', - '/CESR/CESR_services/lt107_mon', - '/CESR/CESR_services/delphi', - '/CESR/CESR_services/runlog', - '/CESR/CESR_services/disp_tunes', - '/CESR/CESR_services/gen_log', - '/CESR/CESR_services/bpm_poll', - '/CESR/CESR_services/comet', - '/CESR/CESR_services/powermonitor_check', - '/CESR/CESR_progs/auto_char', - '/CESR/CESR_progs/beam_dose', - '/CESR/CESR_progs/beam_optimizer', - '/CESR/CESR_progs/cbpm_mon', - '/CESR/CESR_progs/dtp', - '/CESR/CESR_progs/electest', - '/CESR/CESR_progs/ethscope', - '/CESR/CESR_progs/fbph', - '/CESR/CESR_progs/gdl_inp', - '/CESR/CESR_progs/grofix', - '/CESR/CESR_progs/inj', - '/CESR/CESR_progs/ldinit', - '/CESR/CESR_progs/linevolt', - '/CESR/CESR_progs/linac', - '/CESR/CESR_services/linmon', - '/CESR/CESR_progs/mugshot', - '/CESR/CESR_progs/nmr_test', - '/CESR/CESR_progs/node_set', - '/CESR/CESR_progs/plottunes', - '/CESR/CESR_progs/res_meas', - '/CESR/CESR_progs/scopeget', - '/CESR/CESR_progs/timing_test', - '/CESR/CESR_progs/tools', - '/CESR/CESR_progs/refresh', - '/CESR/CESR_progs/analyze_transient', - '/CESR/CESR_progs/sig_acq', - '/CESR/CESR_progs/knobs', - '/CESR/CESR_progs/xbus_load', - '/CESR/CESR_progs/moorecon', - '/CESR/CESR_progs/vacmap', -] - -packages_build_list = [ - '/packages/recipes_f-90_LEPP', - '/packages/activemq-cpp-3.7.0', - '/packages/cfortran', - '/packages/num_recipes/recipes_c-ansi', - '/packages/xsif', - '/packages/PGPLOT', - '/packages/plplot', - '/packages/gsl', - '/packages/fgsl', - '/packages/lapack', - '/packages/lapack95', - '/packages/fftw', - '/packages/root', - '/packages/xraylib', - '/packages/openmpi', - '/packages/hdf5', - '/packages/jsonfortran', - '/packages/libzmq', -] - -#----------------------------------------------------- -#----------------------------------------------------- -repository_addresses = { -# 'ACC-CLASSE' : 'https://accserv.classe.cornell.edu/svn', -# 'ACC-CLASSE-local' : '/mnt/svn', - 'ACC-CLASSE' : 'https://accserv.lepp.cornell.edu/svn', - 'ACC-CLASSE-local' : '/mnt/svn', - 'ACC-LEPP' : 'https://accserv.lepp.cornell.edu/svn', - 'ACC-LEPP-local' : '/mnt/svn', - 'GitLab' : 'https://gitlab01.classe.cornell.edu/bmad/' #RT 60649 - } - - -#----------------------------------------------------- -#----------------------------------------------------- -build_specs = { - 'Linux_x86_64_intel-offline' : { - 'type' : 'release', - 'platform' : 'Linux_x86_64_intel', - 'basedir' : offline_base_dir, - 'util_dir' : offline_util_dir, - 'domain' : 'OFFLINE', - 'host' : offline_host, - 'email_list' : release_mail_list, - 'repositories' : { - 'ACC-CLASSE' : release_build_list - } - }, - 'Linux_x86_64_intel-online' : { - 'type' : 'release', - 'platform' : 'Linux_x86_64_intel', - 'basedir' : online_base_dir, - 'util_dir' : online_util_dir, - 'domain' : 'ONLINE', - 'host' : online_host, - 'email_list' : 
release_mail_list, - 'repositories' : { - 'ACC-CLASSE' : release_build_list - } - }, - 'Linux_x86_64_intel-local' : { - 'type' : 'release', - 'platform' : 'Linux_x86_64_intel', - 'basedir' : local_base_dir, - 'util_dir' : local_util_dir, - 'domain' : 'LOCAL', - 'host' : local_host, - 'email_list' : local_mail_list, - 'repositories' : { - 'ACC-CLASSE' : release_build_list - } - }, - 'Linux_x86_64_gfortran-offline' : { - 'type' : 'release', - 'platform' : 'Linux_x86_64_gfortran', - 'basedir' : offline_base_dir, - 'util_dir' : offline_util_dir, - 'domain' : 'OFFLINE', - 'host' : offline_host, - 'email_list' : release_mail_list, - 'repositories' : { - 'ACC-CLASSE' : release_build_list - } - }, - 'Linux_x86_64_gfortran-online' : { - 'type' : 'release', - 'platform' : 'Linux_x86_64_gfortran', - 'basedir' : online_base_dir, - 'util_dir' : online_util_dir, - 'domain' : 'ONLINE', - 'host' : online_host, - 'email_list' : release_mail_list, - 'repositories' : { - 'ACC-CLASSE' : release_build_list - } - }, - 'Linux_x86_64_gfortran-local' : { - 'type' : 'release', - 'platform' : 'Linux_x86_64_gfortran', - 'basedir' : local_base_dir, - 'util_dir' : local_util_dir, - 'domain' : 'LOCAL', - 'host' : local_host, - 'email_list' : local_mail_list, - 'repositories' : { - 'ACC-CLASSE' : release_build_list - } - }, - 'packages_intel' : { - 'type' : 'packages', - 'platform' : 'Linux_x86_64_intel', - 'basedir' : offline_base_dir, - 'util_dir' : offline_util_dir, - 'domain' : 'OFFLINE', - 'host' : offline_host, - 'email_list' : release_mail_list, - 'repositories' : { - 'ACC-CLASSE' : packages_build_list - } - }, - 'packages_intel-local' : { - 'type' : 'packages', - 'platform' : 'Linux_x86_64_intel', - 'basedir' : local_base_dir, - 'util_dir' : local_util_dir, - 'domain' : 'LOCAL', - 'host' : local_host, - 'email_list' : local_mail_list, - 'repositories' : { - 'ACC-CLASSE' : packages_build_list - } - }, - 'packages_gfortran' : { - 'type' : 'packages', - 'platform' : 'Linux_x86_64_gfortran', - 'basedir' : offline_base_dir, - 'util_dir' : offline_util_dir, - 'domain' : 'OFFLINE', - 'host' : offline_host, - 'email_list' : release_mail_list, - 'repositories' : { - 'ACC-CLASSE' : packages_build_list - } - }, - 'packages_gfortran-local' : { - 'type' : 'packages', - 'platform' : 'Linux_x86_64_gfortran', - 'basedir' : local_base_dir, - 'util_dir' : local_util_dir, - 'domain' : 'LOCAL', - 'host' : local_host, - 'email_list' : local_mail_list, - 'repositories' : { - 'ACC-CLASSE' : packages_build_list - } - } -} diff --git a/util/build_mgmt.py b/util/build_mgmt.py deleted file mode 100644 index 5a02202934..0000000000 --- a/util/build_mgmt.py +++ /dev/null @@ -1,167 +0,0 @@ -#------------------------------------------------------------ -# Common code shared among several build management scripts -# for getting lists of -# -platform directory items -# -full path names of platform directory items -# -linknames for promoted builds (releases) -# -build names of promoted builds -# -# To employ in a program place -# 'import build_mgmt' at beginning of source. 
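# A minimal usage sketch (an editorial example, not part of the original file;
# the query functions are defined further below in this module, and the build
# name shown is a hypothetical instance of the 'cesr_<date>' naming scheme):
#
#   import build_mgmt
#
#   print build_mgmt.Platforms()                # e.g. ['Linux_x86_64_intel']
#   print build_mgmt.ActiveRelNames('current')  # true release name per platform
#   if build_mgmt.BuildExists('cesr_2023_0925_d'):
#       print build_mgmt.NewLinkName('devel')   # e.g. 'devel_174'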
-#------------------------------------------------------------ - -import os - -platforms = ['Linux_x86_64_intel'] -#platforms = ['Linux_x86_64_intel', 'Linux_x86_64_gfortran', 'packages_intel', 'packages_gfortran'] -promotion_labels = ['devel', 'current'] -release_prefix = 'cesr' -builds_basedir = '/nfs/acc/libs/' - -diritems = {} -for platform in platforms: - diritems[platform] = [] - -fulldiritems = {} -for platform in platforms: - fulldiritems[platform] = [] - -prolinks = {} -for platform in platforms: - prolinks[platform] = [] - -builds = {} -for platform in platforms: - builds[platform] = [] - -releases = {} -for platform in platforms: - releases[platform] = [] - - -active_releases = {} -for platform in platforms: - active_releases[platform] = {} - for promotion_label in promotion_labels: - active_releases[platform][promotion_label] = '' - - -for platform in platforms: - subdirs = [] - protected_releases = [] - basedir = builds_basedir + platform - diritems[platform] = os.listdir(basedir) - diritems[platform].sort() - for item in diritems[platform]: - item = basedir + '/' + item - fulldiritems[platform].append(item) - subdirs = os.listdir(basedir) - subdirs.sort() - for item in fulldiritems: - if release_prefix in item: - builds[platform].append(item) - - # Get all promoted release names - for fulldiritem in fulldiritems[platform]: - for promotion_label in promotion_labels: - truename = '' - if promotion_label in fulldiritem: - prolinks[platform].append(os.path.split(fulldiritem)[1]) - truename = os.readlink(fulldiritem) - releases[platform].append(truename) - if os.path.split(fulldiritem)[1] == promotion_label: - active_releases[platform][promotion_label] = truename - - -# These methods provide access to information about the release archive contents -# harvested by the above code or obtained by their own execution. - -def Platforms(): - """Returns a list of all supported platforms.""" - return platforms - -def DirItems(): - """Returns a dictionary of all library archive platform - directory items (short names) for each supported platform.""" - return diritems - -def FullDirItems(): - """Returns a dictionary of all library archive platform - directory items with full pathnames for each supported platform.""" - return fulldiritems - -def Prolinks(promotion_label): - """Returns a dictionary of all promoted build (release) - symlink names for each supported platform.""" - return prolinks - -def NewLinkName(promotion_label): - """Return the name of the next link name to be used when archiving - a previously active release directory for the given promotion label. - This picks the highest numerical portion of the promotion label - type found in all supported platform directories and then adds 1 to - it to compose the new archival promotion label. - Ex. - Highest old archival release label in platform dir 1 = 'devel_123' - Highest old archival release label in platform dir 2 = 'devel_173' - (This mismatch is unlikely to happen, but it is supported.) - New link name returned = 'devel_174' and can be used for - archival rotation in all supported platform directories. 
- """ - latestlinks = {} - highval = 0 - for platform in platforms: - latestlinks[platform] = '' - for link in prolinks[platform]: - if promotion_label in link and '_' in link and \ - link > latestlinks[platform]: - latestlinks[platform] = link - try: - value = int(latestlinks[platform].split('_')[1])+1 - if value > highval: - highval = value - except IndexError: - pass - return promotion_label+'_'+str(highval) - -def Builds(): - """Returns a dictionary of all archived builds, whether or - not they have ever been promoted to a release, for each - supported platform.""" - return builds - -def Releases(): - """Returns dictionary of all releases, active and past, - for each supported platform.""" - return releases - -def ActiveRelNames(promotion_label): - """Return dictionary of true release names for the given - promotion label (typically 'current' or 'devel', defined - above) for each supported platform.""" - names = {} - for platform in platforms: - names[platform] = active_releases[platform][promotion_label] - return names - -def RelNameConsistency(promotion_label): - """Returns True if the true release name for the given - promotion label is identical across all supported platforms, - returns False otherwise.""" - names = [] - for platform in platforms: - names.append(active_releases[platform][promotion_label]) - if len(set(names)) == 1: - return True - else: - return False - -def BuildExists(buildname): - """Returns True if the specified build name is present - in all supported platform archive directories, False - otherwise.""" - for platform in platforms: - if not os.path.exists(builds_basedir+'/'+platform+'/'+buildname): - return False - return True - diff --git a/util/build_supervisor b/util/build_supervisor deleted file mode 100755 index dfc9da65b0..0000000000 --- a/util/build_supervisor +++ /dev/null @@ -1,512 +0,0 @@ -#!/usr/bin/python -#-*-python-*- -# 2.7.5 -# -# The default packages build to use when constructing a new -# release build is determined by a symlink in the archive -# area for each platform. -# -# If a --pkg option is specified on the -# command line, then that packages build will be used -# when constructing the release build. -#------------------------------------------------------------------ -import os -import sys -import time -import socket -import argparse -import ConfigParser -import subprocess as sub -import logging -import threading -import datetime - -# Get 'repositories' and 'source_trees' dictionaries. -import build_config - -# Get kerberos key for remote execution -hostname = socket.gethostname() -#p = sub.Popen('kinit -k -t ~/etc/cesrulib-keytab cesrulib', -p = sub.Popen('kinit -k -t /home/$USER/etc/$USER-keytab $USER', - bufsize=1, - shell=True, - stdout=sub.PIPE ) - -# Print status of the script files with respect to the svn repository. 
-utildir = os.path.abspath(os.path.dirname(sys.argv[0])) -print 'SVN Status for util directory:' -os.system('svn st -u ' + utildir) - -print '\nSVN Status for build_system directory:' -os.system('svn st -u ' + utildir + '/../build_system') - -while True: - nextline = p.stdout.readline() - if nextline == '' and p.poll() != None: - break - sys.stdout.write(nextline) - sys.stdout.flush() - -# Logger -output_verbosity = logging.WARNING -logger = logging.getLogger('builder') -logger.setLevel(output_verbosity) -# Logger Console handler -loghandler = logging.StreamHandler() -loghandler.setLevel(output_verbosity) -# Logger formatter -formatter = logging.Formatter('%(name)s - %(message)s') -# Logger add formatter to handler -logger.addHandler( loghandler ) - - -# Collect all supported platform names -all_platforms = [] -for spec in build_config.build_specs: - all_platforms.append( build_config.build_specs[spec]['platform'] ) -# Remove any duplicate platforms from list -all_platforms = list(set(all_platforms)) -#print 'Platforms that appear in collection of build specifications:' -#print all_platforms - - -argparser = argparse.ArgumentParser(description='Acelerator Libraries Collection Build Tool') - -argparser.add_argument('--pkgname', - action='store', - dest='pkg_name') -argparser.add_argument('--gmakedir', - action='store', - dest='gmake_dir') -argparser.add_argument('--utildir', - action='store', - dest='util_dir') -argparser.add_argument('--spec', - action='store', - dest='build_spec', - help='Build specification') -argparser.add_argument('--nightly', - action='store_true', - dest='nightly') -argparser.add_argument('--online', - action='store_true', - dest='build_online_release', - help='Build the collection of libraries and programs for CESR online use.') -argparser.add_argument('--local', - action='store_true', - dest='local', - help='Build using local paths and resources.') -argparser.add_argument('--packages', - action='store_true', - dest='packages', - help='Build the collection of external Packages used for CESR online.') -muxgroup = argparser.add_mutually_exclusive_group() -muxgroup.add_argument('--intel', - action='store_true', - dest='intel', - help='Build using the Intel ifort compiler.') -muxgroup.add_argument('--gfortran', - action='store_true', - dest='gfortran', - help='Build using the GCC gfortran compiler.') - - -argresults = argparser.parse_args() - -build_online_release = argresults.build_online_release - -local = argresults.local - -build_packages = argresults.packages - -compiler_intel = argresults.intel - -compiler_gfortran = argresults.gfortran - -ACC_SET_GMAKE_JOBS = '2' - -ACC_ENABLE_FPIC = 'N' - -ACC_FC = 'intel' - -ACC_SET_F_COMPILER = 'ifort' - -build_request = 'release_intel' - - -# Defaults from config file. 
-makefile_dir_default = build_config.makefile_dir -if build_online_release: - util_dir_default = build_config.online_util_dir -elif local: - util_dir_default = build_config.local_util_dir -else: - util_dir_default = build_config.offline_util_dir - - -if compiler_intel: - ACC_FC = 'intel' - ACC_SET_F_COMPILER = 'ifort' - build_request = 'release_intel' - - if build_online_release: - build_request = 'online-release_intel' - - if build_packages: - build_request = 'packages_intel' - - if local: - build_request = 'local-release_intel' - - if local and build_packages: - build_request = 'local-packages_intel' - - #if build_dist: - #build_request = 'dist_intel' - - -if compiler_gfortran: - ACC_FC = 'gfortran' - ACC_SET_F_COMPILER = 'gfortran' - build_request = 'release_gfortran' - - if build_online_release: - build_request = 'online-release_gfortran' - - if build_packages: - build_request = 'packages_gfortran' - - if local: - build_request = 'local-release_gfortran' - - if local and build_packages: - build_request = 'local-packages_gfortran' - - #if build_dist: - #build_request = 'dist_gfortran' - - -class BuildController(threading.Thread): - - # Defaults - type = '' - spec = '' - platform = '' - name = '' - full_release_dir = '' - - svn_revision = 'HEAD' - packages_name = 'packages' - test_suite_run = 'False' - util_dir = util_dir_default - makefile_dir = makefile_dir_default - - if local: - release_prefix = 'cesr-local' - else: - release_prefix = 'cesr' - - prefixes = { - 'release' : release_prefix, - 'packages' : 'packages', - 'dist' : 'CLASSE_accel_dist' - } - # Default numerical precision is 'double': IEEE 754-2008 (8 bytes) - precision = 'double' - - - def __init__( self, spec, custom_name ): - """Initialize the build_controller object.""" - threading.Thread.__init__(self) - - self.spec = spec - self.type = build_config.build_specs[spec]['type'] - self.platform = build_config.build_specs[spec]['platform'] - self.basedir = build_config.build_specs[spec]['basedir'] - self.util_dir = build_config.build_specs[spec]['util_dir'] - self.email_list = build_config.build_specs[spec]['email_list'] - self.nightly = argresults.nightly - self.local = argresults.local - self.intel = argresults.intel - self.gfortran = argresults.gfortran - - if not self.is_platform_supported(): - print 'Quitting.' - sys.exit(1) - if custom_name != '': - self.name = custom_name - self.uniqify_build_name() - else: - self.generate_build_name() - self.full_release_dir = self.basedir + '/' + platform + '/' + self.name - self.log_dir = self.basedir + '/' + platform + '/log' - if not os.path.exists( self.log_dir ): - print 'Log directory ' + self.log_dir + ' does not exist! Quitting.' - sys.exit(1) - self.full_logname = self.log_dir + '/' + self.name + '.log' - self.hostname = build_config.build_specs[spec]['host'] - self.checkout_manifest = [] - - - def does_buildname_exist(self): - """Determine if the currently assigned name of this build - exists anywhere in the collection of supported platform - directories. - - Scan through all defined platforms and check for the - presence of a build directory that matches the name - provided or generated. - If the name checked exists in ANY platform base - directories, modify the name with a counter and - check again. 
- Keep incrementing the counter and checking until - the name does not exist in any platform base - directory.""" - already_exists = False - for platform in all_platforms: - checkdir = self.basedir + '/' + platform - checkname = checkdir + '/' + self.name - message = 'Checking for slot in ' + checkname + '... ' - if os.path.exists( checkname ): - already_exists = True - message = message + 'ALREADY EXISTS' - logger.debug(message) - else: - message = message + 'OK' - logger.debug(message) - return already_exists - - - def generate_build_name(self): - """Generate a new build name from scratch based on - the build type requested and the date. This will - provide a name unique across all supported platform - directories.""" - timetuple = time.localtime() - year = str(timetuple[0]) - if timetuple[1] < 10: # Clean this up with leading zeroes - month = '0' + str(timetuple[1]) # in original date retrieval? - else: - month = str(timetuple[1]) - if timetuple[2] < 10: - day = '0' + str(timetuple[2]) - else: - day = str(timetuple[2]) - datecode = year + '_' + month + day - self.name = self.prefixes[self.type] + '_' + datecode + '_d' - self.uniqify_build_name() - - - def uniqify_build_name(self): - """Make the build name unique by appending a counter - to make it such if the build name at the time of - invocation of this method already exists anywhere - in the collection of supported platform directories.""" - count = 1 - orig_name = self.name - while self.does_buildname_exist(): - self.name = orig_name + '_' + str(count) - count = count + 1 - - - def is_platform_supported(self): - if self.platform in all_platforms: - if os.path.exists( self.basedir + '/' + self.platform ): - return True - else: - print 'The build specification references the ' + self.platform + \ - ' platform, but a corresponding directory does not exist in ' \ - + self.basedir + '.' - print 'Try creating it and run this again.' - return False - print 'Platform "' + self.platform + '" not mentioned in builder platform specifications.' - return False - - - def write_log_stub(self): - """Write some identifying header fields in the platform-specific log file. 
- Part of minimal set of methods needed to spawn a build.""" - print 'Opening ' + self.full_logname - self.log = open(self.full_logname, 'w') - self.log.write('build_host ' + self.hostname +'\n') - self.log.write('build_type ' + self.type +'\n') - self.log.write('libs_basedir ' + self.basedir +'\n') - self.log.write('platform ' + self.platform +'\n') - self.log.write('svn_revision ' + str(self.svn_revision) +'\n') - self.log.write('util_dir ' + self.util_dir +'\n') - self.log.write('makefile_dir ' + self.makefile_dir +'\n') - self.log.write('email_list ' + self.email_list +'\n') - self.log.write('build_name ' + self.name +'\n') - self.log.write('full_release_dir ' + self.full_release_dir +'\n') - self.log.write('packages_name ' + self.packages_name +'\n') - self.log.write('nightly ' + str(self.nightly) +'\n') - self.log.write('local ' + str(self.local) +'\n') - self.log.write('intel ' + str(self.intel) +'\n') - self.log.write('gfortran ' + str(self.gfortran) +'\n') - self.write_checkout_manifest() - self.log.write('[builder] - STARTING SUBSCRIPT\n') - self.log.flush() - - - def write_checkout_manifest(self): - for reponame in build_config.build_specs[self.spec]['repositories']: - self.log.write('repository ' + build_config.repository_addresses[reponame] + ' ') - for dir in build_config.build_specs[self.spec]['repositories'][reponame]: - self.log.write(dir + ' ') - self.checkout_manifest.append( dir ) - self.log.write('\n') - self.log.flush() - - - def checkout_files(self): - """Check out all files described in the manifest - associated with this build from the SVN repositories specified in - the configuration file. Get all files from the head revision at the time of - Part of minimal set of methods needed to spawn a build.""" - os.mkdir(self.full_release_dir) - os.chdir(self.full_release_dir) - for reponame in build_config.build_specs[self.spec]['repositories']: - revision = svn_revisions[reponame] - for dir in build_config.build_specs[self.spec]['repositories'][reponame]: - #RT 60649 - if 'git/' == dir[:4]: - repo_address = build_config.repository_addresses['GitLab'] - localdir = dir[4:] - checkout_command = 'git clone --quiet ' + repo_address + \ - localdir + '.git ' + self.full_release_dir + '/' + localdir - print ('CO: ' + checkout_command) - else: - repo_address = build_config.repository_addresses[reponame] - localdir = os.path.split(dir)[1] - # Added klist commands to timestamp SVN access, as per RT#65510 - checkout_command = 'klist -A ; svn -r ' + str(revision) + ' co ' + repo_address + \ - dir + ' ' + self.full_release_dir + '/' + localdir + ' ; klist -A ' - - self.log.write('Checkout command: [' + checkout_command + ']\n') - - proc = sub.Popen(checkout_command, - shell=True, - stdin=sub.PIPE, - stdout=sub.PIPE, - stderr=sub.STDOUT ) - lines = proc.stdout.readlines() - for line in lines: - self.log.write(line) - - - def get_packages_name(self): - """Obtain the default packages name to use for the build.""" - pass - - - def set_test_suite_run(self, request): - self.test_suite_run = request - - - def run(self): - """Execute the build process.""" - self.write_log_stub() - self.checkout_files() - build_command = ("ssh " + self.hostname + \ - " 'export ACC_SET_GMAKE_JOBS=" + ACC_SET_GMAKE_JOBS + \ - " ; export ACC_SET_F_COMPILER=" + ACC_SET_F_COMPILER + \ - " ; export ACC_ENABLE_FPIC=" + ACC_ENABLE_FPIC + \ - " ; [ -e /opt/rh/python27/enable ] && . 
/opt/rh/python27/enable" \ - " ; python " + self.util_dir + "/builder " + self.full_logname + "'") - print build_command - p = sub.Popen(build_command, bufsize=-1, shell=True) - p_out = p.communicate()[0] - - - - - -#----------------------- -# Non-class code begins -#----------------------- - - - - - -#--------------------------------------- -# Create and set up a build controller -# for each platform build requested. -#--------------------------------------- -builds = [] -svn_revisions = {} - -def head_svn_revision(repository_name): - repository_URL = build_config.repository_addresses[repository_name] - rev_command = 'svn info ' + repository_URL + ' -r HEAD | grep Revision: | cut -c11-' - proc = sub.Popen(rev_command, - shell=True, - stdin=sub.PIPE, - stdout=sub.PIPE, - stderr=sub.STDOUT ) - revnum = proc.stdout.readline().strip() - return int(revnum) - - -for spec in build_config.build_requests[build_request]: - platform = build_config.build_specs[spec]['platform'] - print 'Setting up build ' + spec + ' on platform ' + platform - custom_name = '' - build = BuildController( spec, custom_name ) - for repo_name in build_config.build_specs[spec]['repositories']: - if repo_name not in svn_revisions: - svn_revisions[repo_name] = head_svn_revision(repo_name) - build.svn_revisions = svn_revisions - builds.append(build) -print 'SVN revisions:' -print svn_revisions - - - -#--------------------------------------- -# Override parameters of some or all of -# the builds here, if necessary. -# makefile directory, svn_revision, -# packages directory to use, etc... -#--------------------------------------- -if argresults.pkg_name is not None: - print '\nA custom packages name was provided.' - print ' Using "' + argresults.pkg_name + '" instead of the default.\n' - for build in builds: - build.packages_name = argresults.pkg_name - -if argresults.gmake_dir is not None: - print '\nA custom Gmake directory was provided.' - print ' Using "' + argresults.gmake_dir + '" instead of the default.\n' - for build in builds: - build.makefile_dir = argresults.gmake_dir - -if argresults.util_dir is not None: - print '\nA custom util directory was provided.' - print ' Using "' + argresults.util_dir + '" instead of the default.\n' - for build in builds: - build.util_dir = argresults.util_dir - - -#--------------------------------------- -# Initiate all builds, each in a thread -# of its own, and get the name of the -# build common to all for this script's -# use. -#--------------------------------------- -build_name = builds[0].name - -for bnum, build in enumerate(builds): - for repo_name in build.svn_revisions: - revision = build.svn_revisions[repo_name] - build.start() # Method inherited from Thread class; - # invokes 'run' method in a separate thread. - - -time.sleep(10) -print '\n\nBuild name is: ' + build_name -print "Waiting..." -time.sleep(15) -print '\n\nStill alive...' - -# diff --git a/util/builder b/util/builder deleted file mode 100755 index f93cb7a3c5..0000000000 --- a/util/builder +++ /dev/null @@ -1,252 +0,0 @@ -#!/opt/rh/python27/root/usr/bin/python -#-*-python-*- -# Accepts full build logfile name as only argument. -# i.e. 
-# 'builder /nfs/acc/libs/Linux_i686_ifort/log/cesr_2011_0907_d_1.log' -#-------------------------------------------------------------- -import sys -import os -from os.path import normpath, basename -import subprocess as sub -import socket - -logfile = sys.argv[1] - -file = open(logfile, 'r+') -inlines = file.readlines() -checkout_manifest = {} - - -print 'BUILDER SCRIPT RUNNING on host: ' + socket.gethostname() - -#-------------------------------------------------------------- -# Extract header values from log file to use as control inputs. -# Any header field found becomes a variable in the class -# 'invars' and can be accessed afterwards via the syntax -# 'invars.'. -#-------------------------------------------------------------- -class invars(): - pass - -for line in inlines: - boolean_map = {'True':True, 'False':False} - if '[builder]' in line: - break - if 'repository' in line: - repo = line.split()[1] - checkout_manifest[repo] = line.split()[2:] - else: - var = line.split()[0] - val = line.split()[1].strip() - if val in boolean_map: - val = boolean_map[val] - setattr(invars, var, val) - - - -# Close file and reopen in append mode. -file.close() -file = open(logfile, 'a', 1) # unbuffered, needed? - -sys.stdout = file - -hostname = socket.gethostname() -#p = sub.Popen('kinit -k -t ~/etc/cesrulib-keytab cesrulib', -p = sub.Popen('kinit -k -t /home/$USER/etc/$USER-keytab $USER', - bufsize=1, - shell=True, - stdout=sub.PIPE ) -while True: - nextline = p.stdout.readline() - if nextline == '' and p.poll() != None: - break - sys.stdout.write(nextline) - sys.stdout.flush() - -#print 'Shell Environment Dump:' -#for envvar in os.environ: -# print envvar + ' = ' + os.environ[envvar] - - -def manifest_to_build_list( manifest ): - """Turn list of repository check-out paths into a - simple list of buildable directories.""" - build_list = [] - for repo in manifest: - for dir in checkout_manifest[repo]: - full_dir = normpath(invars.full_release_dir) + '/' + basename(normpath(dir)) - if os.path.exists(full_dir + '/CMakeLists.txt'): - build_list.append(full_dir) - if os.path.exists(full_dir + '/acc_build'): - build_list.append(full_dir) - return build_list - - -def link_to_packages( packages_name ): - """Create a symbolic link in the release directory - called 'packages' to the packages area named in - the build setup.""" - full_packages_dir = invars.libs_basedir+'/'+invars.platform+'/'+packages_name - if os.path.islink( full_packages_dir ): - true_packages_name = '../'+os.readlink(full_packages_dir) - sys.stdout.write( '\nREADLINK on packages_dir = ' + full_packages_dir +'\n') - else: - true_packages_name = '../'+packages_name - sys.stdout.write('Setting link to packages: ' + true_packages_name+'\n') - sys.stdout.flush() - os.symlink( true_packages_name, invars.full_release_dir+'/packages' ) - - -#def determine_build_order( ): -#"""Examine all source code to build and come up with -# optimum order of directories to visit.""" - - -def build_directory( dir, statlist, target ): - print '\n\n\n-------- Building: ' + dir - os.chdir( dir ) - use_32bit = ' ' - if 'lnx209' in hostname: - use_32bit = ' ACC_FORCE_32_BIT=Y; ' - if invars.intel: - ACC_SET_F_COMPILER = 'ifort' - if invars.gfortran: - ACC_SET_F_COMPILER = 'gfortran' - else: - ACC_SET_F_COMPILER = 'ifort' - - if 'XbsmAnalysis' in dir: - use_gcc482 = ' ; [ -e /opt/rh/devtoolset-2/enable ] && source /opt/rh/devtoolset-2/enable ' - else: - use_gcc482 = '' - - ACC_SET_GMAKE_JOBS = '2' - - if 'cbpmfio' in dir: - ACC_SET_GMAKE_JOBS = '2' - if 'xetec' in 
dir: - ACC_SET_GMAKE_JOBS = '1' - - ACC_ENABLE_FPIC = 'N' - - make_command = 'mk' - if target == 'debug': - make_command = 'mkd' - build_command = 'export ACCLIB='+ invars.build_name + \ - ' ; export ACC_SET_GMAKE_JOBS=' + ACC_SET_GMAKE_JOBS + \ - ' ; export ACC_SET_F_COMPILER=' + ACC_SET_F_COMPILER + \ - ' ; export ACC_ENABLE_FPIC=' + ACC_ENABLE_FPIC + \ - ' ; export UTIL_DIR_REQUEST=' + invars.util_dir + \ - ' ; export OFFLINE_LOCAL_ARCHIVE_BASE_DIR=' + invars.libs_basedir + \ - ' ; source ' + invars.util_dir + '/acc_vars.sh' \ - + use_gcc482 + \ - ' ; export PATH=/usr/local/bin:$PATH' \ - ' ; cmake --version ' \ - ' ; export ACC_BUILD_EXES=Y ; export ACC_ENABLE_SHARED=Y ; env | grep ACC ; ' + make_command - print '-------- Using Build Command: ' + build_command - - p = sub.Popen(build_command, - bufsize=1, - shell=True, - stdout=sub.PIPE, - stderr=sub.STDOUT ) - while True: - nextline = p.stdout.readline() - if nextline == '' and p.poll() != None: - print 'RETURN CODE ===> [' + str(p.returncode) + ']' - statlist.append( [dir, p.returncode] ) - break - sys.stdout.write(nextline) - sys.stdout.flush() - - -def build_pkg_directory( dir, statlist, target ) : - print 'Build pkg directory here...' - -#--------------------------- - - - -link_to_packages( invars.packages_name ) - -blist = manifest_to_build_list( checkout_manifest ) - -targets = ['production', 'debug'] - -buildpass_summaries = {} - -for buildpass, target in enumerate(targets): - print '\n\n-----------------------------------' - print target + ' pass ('+ str(buildpass+1) +' of ' + str(len(targets)) + ')' - print '-----------------------------------' - summary = [] - for dir in blist: - build_directory( dir, summary, target ) - buildpass_summaries[target] = summary - - print '\n' - print target + ' build pass status summary:' - for entry in summary: - print str(entry[0]) + ' : ' + str(entry[1]) - - sys.stdout.flush() - - -# Create a condensed pass summary giving success/failure -# info for each build pass that took place. -print 'Condensed pass summary:' -all_OK = {} -for buildpass, target in enumerate(targets): - all_OK[target] = True - for entry in buildpass_summaries[target]: - if entry[1] != 0: - all_OK[target] = False - if all_OK[target]: - print target + ' : OK' - set_nightly_link = True - else: - print target + ' : ERROR' - error_log_message_cmd = 'grep -C 10 Error ' + logfile -# mail_command = error_log_message_cmd + ' | /bin/mail -s "Nightly build error" cesrulib@cornell.edu' - mail_command = error_log_message_cmd + ' | /bin/mailx -s "Nightly build error" ' + invars.email_list - p = sub.call(mail_command, - bufsize=1, - shell=True) - - - - -# If all passes succeeded, AND a nighly build was requested -# from the build_supervisor, then rotate the nightly link. -# Create searcf_namelist index files. -if invars.nightly: - rotate_nightly = True - for entry in all_OK: - if not all_OK[entry]: - rotate_nightly = False - rename_error_build_command = 'mv ' +invars.libs_basedir+'/'+invars.platform+'/'+invars.build_name+' '+invars.libs_basedir+'/'+invars.platform+'/'+invars.build_name+'.BAD' - rename_error_log_command = 'mv ' + logfile +' '+invars.libs_basedir+'/'+invars.platform+'/log/'+invars.build_name+'.BAD.log' - print 'Renaming failed build to '+invars.build_name+'.BAD' - p = sub.call(rename_error_build_command, - bufsize=1, - shell=True) - p = sub.call(rename_error_log_command, - bufsize=1, - shell=True) - break - - if rotate_nightly: - print 'Rotating nightly link...' 
- print invars.libs_basedir+'/'+invars.platform+'/nightly' - if os.path.lexists(invars.libs_basedir+'/'+invars.platform+'/nightly'): - print 'Nightly link exists.' - os.remove(invars.libs_basedir+'/'+invars.platform+'/nightly') - os.symlink( invars.build_name, invars.libs_basedir+'/'+invars.platform+'/nightly' ) - print 'Creating searchf_namelist index files.' - sub.call(["/nfs/acc/libs/util/create_searchf_namelist", "-r", invars.libs_basedir+'/'+invars.platform+'/'+invars.build_name ]) - -if invars.build_type == "packages": - remove_packages_link_command = 'rm ' +invars.libs_basedir+'/'+invars.platform+'/'+invars.build_name+'/packages' - p = sub.call(remove_packages_link_command, - bufsize=1, - shell=True) diff --git a/util/purge_releases b/util/purge_releases deleted file mode 100755 index 1c6f8ec6c7..0000000000 --- a/util/purge_releases +++ /dev/null @@ -1,263 +0,0 @@ -#!/opt/rh/python27/root/usr/bin/python -#--------------------------------------------------- -# This script deletes all releases retaining only those that meet -# the retention criteria defined in the -# /home/cesrulib/bin/util/BUILD_SYSTEM.conf file. -# -#------------------------------------------------------------- -# The program does the following: -# 1) Get a list of all the releases of the type cesr_* -# based on log files from the directory /nfs/acc/libs/log -# 2) Get an array(s) for the releases determining their age -# 3) Do this for each release: -# 4) Determine if a release needs to be deleted -# 5) Delete the release if necessary using the -# /home/cesrulib/bin/util/DELETE_release script -#-------------------------------------------------------------- -# Author: Siarhei Vishniakou / Matt Rendina -# Date: Late November 2007 -#-------------------------------------------------------------- - -import string, os, sys, time, math, ConfigParser -from string import ljust - -################ -# Set up configuration parser -################ -cfg_file = "/home/cesrulib/bin/util/BUILD_SYSTEM.conf" -config = ConfigParser.ConfigParser() -config.optionxform = lambda x: x -config.read(cfg_file) -################ -# Get values -################ - -UTIL_DIR = config.get("Paths","UTIL_DIR") -CURRENT_YEARS = int(config.get("Retention","CURRENT_YEARS")) -CURRENT_MONTHS = int(config.get("Retention","CURRENT_MONTHS")) -CURRENT_DAYS = int(config.get("Retention","CURRENT_DAYS")) - - -#### Initialization -#-------------------------------------------------------------------------- - -releases = [] #array to hold release names -prefix = "cesr" #the mask to discriminate releases. Idea: to get everything that starts with "cesr" -devel_link_prefix = "devel" # All symlinks to devel releases start with this -current_link_prefix = "current" # All symlinks to current releases start with this -log_dir = config.get("Paths","LOG_DIR") #directory with the .genlog files -delete_util_path = UTIL_DIR+"/DELETE_release" -log_file = log_dir + "/purge_releases_log.txt" - - -current_span = [ CURRENT_YEARS, CURRENT_MONTHS, CURRENT_DAYS] # Lifespan for current -devel_span = int(config.get("Retention","DEVEL_SPAN")) # Starting this many currents ago, all devels will be kept - - -devel_age = [0, 0, 0] # Age of oldest DEVEL to be retained -timehere = time.localtime() - -#-------------------------------------------------------------------------------- -# Release dictionary: -# Keys are release names. Each key has a 5-item list holding -# information relevant to that release, all components form an 'entry'. 
-#
-# "release_name" : [age_years, age_months, age_days, release_type, deletion flag]
-#---------------------------------------------------------------------------------
-rel_entries = {}
-
-#--------------------------------------------------------------------------------
-
-print "\npurge_releases:"
-print "------------------"
-print "All 'CURRENT' releases younger than "+str(current_span[0])+" year(s), "+str(current_span[1]) \
-      +" month(s), and "+str(current_span[2])+" day(s) are kept."
-print "All devels within the last "+str(devel_span)+" 'CURRENT's are kept."
-print "Everything else gets removed."
-print "_______________________________________________________"
-
-
-
-# Get a list of all the releases of the type cesr_* based on log files
-output = os.listdir(log_dir)
-for line in output:
-    if line[0:len(prefix)] == prefix:
-        line = line[:-7]   # Remove the .genlog extension
-        rel_entries[line] = [-1, -1, -1, "--", "NO"]
-
-
-
-################
-# Here we establish the age of each release (rough estimate only - using 365 days/year, 30 days/month)
-################
-
-for rel in rel_entries:
-    time_rel= int(rel[5:9]), int(rel[10:12]), int(rel[12:14]) ,0 ,0 ,0 ,0 ,0 ,0
-
-    time_diff = time.mktime(timehere) - time.mktime(time_rel)
-    years = int(math.floor(time_diff/(60*60*24*365)))   # how many years old is the release
-    rel_entries[rel][0] = years
-
-    time_diff = time_diff-(60*60*24*365) * years
-    months = int(math.floor(time_diff/(60*60*24*30)))   # how many months old
-    rel_entries[rel][1] = months
-
-    time_diff = time_diff-(60*60*24*30) * months
-    days = int(math.floor(time_diff/(60*60*24)))   # how many days old
-    rel_entries[rel][2] = days
-
-
-
-
-###############
-# Determine the status of each release ("devel", "current", or "none")
-###############
-### To do this:
-### 05) Get a list of all releases in the repository path https://accserv.lepp.cornell.edu/svn/tags
-###     using svn list https://accserv.lepp.cornell.edu/svn/tags
-### 06) Strip the end of the line symbol and the "/" symbol
-### 07) Establish whether a release is found in repository in the /tags directory
-### 08) If it is found, determine its status (devel, current, or none). If not found, give it status none
-
-
-devel_list = []
-for line in os.listdir("/home/cesrulib/acc_libs/Linux_i686_intel"):
-    if os.path.islink("/home/cesrulib/acc_libs/Linux_i686_intel/"+line) and line[:5] == 'devel':
-        devel_list.append( os.readlink("/home/cesrulib/acc_libs/Linux_i686_intel/"+line) )
-
-
-current_list = []
-for line in os.listdir("/home/cesrulib/acc_libs/Linux_i686_intel"):
-    if os.path.islink("/home/cesrulib/acc_libs/Linux_i686_intel/"+line) and line[0:7] == 'current':
-        current_list.append( os.readlink("/home/cesrulib/acc_libs/Linux_i686_intel/"+line) )
-
-
-
-# Determine which releases in the master release list are DEVEL releases
-# do this first, it can be overridden below if a release was promoted a
-# second time to CURRENT.
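# Illustrative example (hypothetical release name): a release that appears in
# the devel symlink list ends up with an entry of the form
#   rel_entries['cesr_2007_1120_d'] = [0, 2, 5, 'DEVEL', 'NO']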
-for rel in rel_entries: - for name in devel_list: - if rel == name: - rel_entries[rel][3] = 'DEVEL' - -for rel in rel_entries: - for name in current_list: - if rel == name: - rel_entries[rel][3] = 'CURRENT' - - - -##### At this point, all releases that have a log file in /nfs/acc/libs/log have been -##### gathered and identified as "current", "devel", or "none" -##### Also, their ages have been determined -##### Next we need: -##### 10) Find a value for devel_age variable (see description on top) -##### 20) Determine the "delete" array (see description) -##### - - -# Sort the DEVEL and CURRENT lists according to date in name -devel_list.sort() -current_list.sort() -# Sort names of main release list for later sorted printing -rels_sorted = rel_entries.keys() -rels_sorted.sort() -rels_sorted.reverse() - -# Step 10) -num_current = len(current_list) -print "There are "+str(num_current)+" 'CURRENT' releases in the archive.\n" -current_list.reverse() # Now in newest-to-oldest order - -current_boundary = current_list[devel_span-1] -devel_age = [ rel_entries[current_boundary][0], rel_entries[current_boundary][1], rel_entries[current_boundary][2] ] - -print "The archived 'CURRENT' release after which all 'DEVELs' will be kept is " + current_boundary -print "This release is "+str(devel_age[0])+" years, "+str(devel_age[1])+" months, "+str(devel_age[2])+" days old." -print "All non-current releases older than this will be deleted.\n" - - -########################################################## -########## -########## This is the removal criteria being implemented: (step 20) -########## -########################################################### -time_devel_age = time.mktime( devel_age + [0,0,0,0,0,0] ) -time_current_age = time.mktime( current_span + [0,0,0,0,0,0] ) - -for rel in rel_entries: - rel_time = time.mktime( rel_entries[rel][0:3] + [0,0,0,0,0,0] ) - # If older than oldest retained CURRENT release, flag for deletion - if (rel_time - time_current_age) > 0: - rel_entries[rel][4] = 'YES' - # If not current, and older than devel_age, flag. - elif (rel_entries[rel][3] != "CURRENT") and (rel_time - time_devel_age > 0): - rel_entries[rel][4] = 'YES' - # If not CURRENT or DEVEL and older than 2 days, flag. - elif (rel_entries[rel][2] >= 2) and (rel_entries[rel][3] != "CURRENT") and (rel_entries[rel][3] != "DEVEL"): - rel_entries[rel][4] = 'YES' - - - -# Tally up the number of releases flagged for deletion -flagged = 0 -for rel in rel_entries: - if (rel_entries[rel][4] == 'YES'): - flagged = flagged + 1 - - -# Print a summary table of release names, ages, types and deletion flag. -# Write same results to log file. -#------------------------------------------------------------------------ -for ppass in range(2): - if ppass == 1: - log = open(log_file,'w') - sys.stdout = log # Redirect prints for this pass to log file. - - print time.strftime('%X %x %Z') - - if flagged == 0: - print "There are no releases to be deleted." - else: - print "A total of "+str(flagged)+" release(s) will be deleted." - - print "" - print " Age " - print " Name (y, m, d) Type DELETE? 
" - print "-----------------------------------------------" - - for rel in rels_sorted: - yr = str(rel_entries[rel][0]) - mo = str(rel_entries[rel][1]) - dy = str(rel_entries[rel][2]) - type = rel_entries[rel][3] - delflag = rel_entries[rel][4] - print ljust(rel, 19), ljust(yr,3), ljust(mo,3), ljust(dy,3), ljust(type,8), delflag - - - -######### -######### Now use the utility "DELETE_release" in the utils folder with -PURGE flag to remove unwanted releases -######### -cmd_output = [] - -print "" -for rel in rel_entries: - if rel_entries[rel][4] == 'YES': - out = os.popen(delete_util_path+" -PURGE REL="+rel).readlines() - cmd_output.append(out) - - -# Dump output of DELETE_release script to logfile only -for line in cmd_output: - print line - - -# Restore print operator back to standard output -sys.stdout = sys.__stdout__ - - - -log.close() - diff --git a/util/rotate_release b/util/rotate_release deleted file mode 100755 index 2eb0593703..0000000000 --- a/util/rotate_release +++ /dev/null @@ -1,269 +0,0 @@ -#!/usr/bin/python -# -*- python -*- -# rotate_release -#-------------------------------------------------------- -import argparse -import time -import sys -import os -import subprocess as sub -import build_mgmt - -#mailing_list_address = 'acc-code-lepp@lnx122.lns.cornell.edu' -mailing_list_address = 'classe-acc-comp-l@list.cornell.edu' -from_address = 'classe-acc-comp-l@list.cornell.edu' - -# Make unique tempfile name to hold e-mail body -mail_tempfile = '/tmp/relrot-'+str(time.time()) - - -# Set up command-line argument handling -argparser = argparse.ArgumentParser(description='Build-to-release rotation tool') -argparser.add_argument('buildname', - type=str, - help='Name of build') -argparser.add_argument('--mail', - action='store_true', - dest='send_mail', - default=False, - help='Send rotation notification message to libraries mailing list.') -argparser.add_argument('--tag', - action='store_true', - dest='create_tag', - default=False, - help='Create a tagged copy of all source and support files in repository under the release\'s name.') - -muxgroup = argparser.add_mutually_exclusive_group() -muxgroup.add_argument('--devel', - action='store_true', - dest='devel', - default=True, - help='DEVEL rotation request') -muxgroup.add_argument('--current', - action='store_true', - dest='current', - default=False, - help='CURRENT rotation request') - -argresults = argparser.parse_args() - -release_type = 'devel' -if argresults.current: - argresults.devel = False - release_type = 'current' - -buildname = argresults.buildname -send_mail = argresults.send_mail - - -# Get local copies of needed collections of values. -platforms = build_mgmt.Platforms() -releases = build_mgmt.Releases() -fulldiritems = build_mgmt.FullDirItems() -builds_basedir = build_mgmt.builds_basedir -active_relnames = build_mgmt.ActiveRelNames(release_type) - -for platform in platforms: - if buildname in releases[platform]: - print 'Note: ['+platform+']'+ ' Build has already been promoted' - - -# Verify that the specified release is promoted to the same rank in -# on all supported build platforms. -if build_mgmt.RelNameConsistency(release_type): - old_releasenames = build_mgmt.ActiveRelNames(release_type) - activenames = [] - for platform in old_releasenames: - activenames.append(old_releasenames[platform]) - if len(set(activenames)) != 1: - print '\n\nThere is a mismatch between the active release across' - print 'the collection of supported platforms.' 
- for platform in old_releasenames: - print ' '+platform+' : '+old_releasenames[platform] - print 'Please address this before continuing with a' - print 'rotation,or perform the release rotation manually.\n' - sys.exit(1) - else: - old_releasename = activenames[0] - #print old_releasename - - -# Verify that the build directory exists in all supported build -# platform directories. -if not build_mgmt.BuildExists(buildname): - print 'Build name ' + buildname + ' does not exist in all' - print 'of the following archive directories.' - for platform in platforms: - print platform - sys.exit(1) - - -def send_notification_email(old_relname): - # Make unique tempfile name to hold e-mail body - mail_tempfile = '/tmp/relrot-'+str(time.time()) - - time_of_day = 'morning' - hour = time.localtime()[3] - if hour >= 12: - time_of_day = 'afternoon' - - f = open(mail_tempfile, 'w') - f.write('Good '+ time_of_day +' everyone,\n\n') - f.write('The accelerator physics libraries have been rotated as follows:\n\n') - f.write(release_type.upper() + ' release rotation for:\n') - for platform in platforms: - f.write(' ' + platform + '\n') - f.write('\n') - f.write('Old: ' + old_relname +' --> New: '+ buildname +'\n\n') - f.write('\n') - f.write(' -Accelerator Code Librarian\n\n') - f.write('--------------------------------------------------------------------------------\n') - f.write('Accelerator Code Support Links -\n') - f.write('Documentation : https://wiki.classe.cornell.edu/ACC/ACL/WebHome\n') - f.write('Bug Reports : service-classe@cornell.edu\n') - f.write('Librarian Email : cesrulib@cornell.edu\n') - f.write('Repository Viewing : http://accserv.lepp.cornell.edu/cgi-bin/view.cgi/\n') - - mail_command = '/bin/mailx -s "Accelerator Libraries Rotation" -r ' + from_address + ' ' + mailing_list_address + ' < ' + mail_tempfile - print 'Sending e-mail notification of this rotation to mailing list...' - f.close() - p = sub.call(mail_command, - bufsize=1, - shell=True) - if os.path.exists(mail_tempfile): - os.remove(mail_tempfile) - print 'Done.' - -#def tag_release(): -# - - - -# If a tag operation was requested, compose the command(s) for tagging and -# execute them. - -# Perform the rotation of active release to archive link -# and requested build to active release. -# -archive_linkname = build_mgmt.NewLinkName(release_type) -for platform in platforms: - - # Definitions needed for online Rotation - platform_dir = builds_basedir +'/'+ platform +'/' - active_relname = active_relnames[platform] - packages_link = os.readlink(platform_dir + buildname +'/packages') - packages_dir = packages_link[3:26] - online_basedir = '/nfs/cesr/online/lib/Linux_x86_64_intel/' - online_cmd_mkdir = ("ssh cesrulib@cesrshell mkdir " + online_basedir + buildname) - online_cmd_remote_tar = ("cd " + platform_dir + buildname + "; tar -cf - . 
| ssh cesrulib@cesrshell 'cd "+ online_basedir + buildname + " ; tar -xf - .'") - online_cmd_rm_link = ("ssh cesrulib@cesrshell rm -f " + online_basedir + release_type) - online_cmd_new_link = ("ssh cesrulib@cesrshell 'cd " + online_basedir + " ; ln -s " + buildname + " " + release_type +"'") - online_cmd_rotate_link = ("ssh cesrulib@cesrshell 'cd " + online_basedir + " ; ln -s " + active_relname + " " + archive_linkname + "'") - online_cmd_check_packages = ("ssh cesrulib@cesrshell '[ -d " + online_basedir + packages_dir + " ] && echo yes || echo no'") - online_cmd_mkdir_packages = ("ssh cesrulib@cesrshell mkdir -p " + online_basedir + packages_dir) - online_cmd_remote_packages_tar = ("cd " + platform_dir + packages_dir + "; tar -cf - . | ssh cesrulib@cesrshell 'cd "+ online_basedir + packages_dir + " ; tar -xf - .'") - - # - print '' - print 'New '+ release_type +' Build name: ' + buildname - print 'New '+ release_type +' Build directory to copy: ' + platform_dir + buildname - print 'Old '+ release_type +' will be rotated to link name: ' + archive_linkname - print 'Old '+ release_type +': ' + active_relname - print 'CESR Online base directory to copy to: ' + online_basedir - - # ssh/tar release to CESR Online - if not os.path.exists(online_basedir+buildname): - #print online_cmd_mkdir - step1 = sub.Popen(online_cmd_mkdir, - bufsize=1, - shell=True, - stdout=sub.PIPE, - stderr=sub.STDOUT ) - print '' - print 'Making Build directory: ' + online_basedir + buildname - step1.communicate() - - #print online_cmd_remote_tar - step2 = sub.Popen(online_cmd_remote_tar, - bufsize=1, - shell=True, - stderr=sub.STDOUT ) - print '' - print 'Copying ' + buildname + ' to: ' + online_basedir + buildname - step2.communicate() - - # Check for Online packages - #print online_cmd_check_packages - step3 = sub.Popen(online_cmd_check_packages, - bufsize=1, - shell=True, - stdout=sub.PIPE, - stderr=sub.STDOUT ) - output = step3.communicate()[0] - - if "yes" in output: - print 'CESR Online Packages directory: ' + online_basedir + packages_dir - else: - # Make online packages dir - #print online_cmd_mkdir_packages - step4 = sub.Popen(online_cmd_mkdir_packages, - bufsize=1, - shell=True, - stdout=sub.PIPE, - stderr=sub.STDOUT ) - print '' - print 'Making CESR Online Packages directory: ' + online_basedir + packages_dir - step4.communicate() - - # ssh/tar over packages to online - #print online_cmd_remote_packages_tar - step5 = sub.Popen(online_cmd_remote_packages_tar, - bufsize=1, - shell=True, - stdout=sub.PIPE, - stderr=sub.STDOUT ) - print '' - print 'Copying ' + packages_dir + ' to: ' + online_basedir + packages_dir - step5.communicate() - - # Now update off-line links - os.chdir(platform_dir) - os.symlink(active_relname, archive_linkname) - os.remove(platform_dir+'/'+release_type) - os.symlink(buildname, release_type) - - # Now update on-line links - #print online_cmd_rm_link - step6 = sub.Popen(online_cmd_rm_link, - bufsize=1, - shell=True, - stdout=sub.PIPE, - stderr=sub.STDOUT ) - print '' - print 'Cleaning up old ' + release_type + ' link.' 
-    step6.communicate()
-
-    #print online_cmd_new_link
-    step7 = sub.Popen(online_cmd_new_link,
-                      bufsize=1,
-                      shell=True,
-                      stdout=sub.PIPE,
-                      stderr=sub.STDOUT )
-    print ''
-    print 'Making new soft link from ' + buildname + ' to ' + release_type
-    step7.communicate()
-
-    #print online_cmd_rotate_link
-    step8 = sub.Popen(online_cmd_rotate_link,
-                      bufsize=1,
-                      shell=True,
-                      stdout=sub.PIPE,
-                      stderr=sub.STDOUT )
-    print ''
-    print 'Rotating old ' + release_type + ' ' + active_relname + ' to ' + archive_linkname
-    step8.communicate()
-    print ''
-
-
-if send_mail:
-    send_notification_email(old_releasename)
diff --git a/util/svn_upgrade b/util/svn_upgrade
deleted file mode 100755
index f979aab251..0000000000
--- a/util/svn_upgrade
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-#
-# To upgrade local svn directory
-# from 1.6 to current svn client
-# version.
-#
-
-LIST=$(ls -d */)
-
-for DIR in ${LIST}
-do
-  echo ${DIR}
-  cd ${DIR}
-  svn upgrade
-  cd ..
-done
-
diff --git a/util/tag_release b/util/tag_release
deleted file mode 100755
index c4f1597b61..0000000000
--- a/util/tag_release
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-#-*- python -*-
-#
-
-# Compose list of top-level directories in the release that are under
-# lab SVN version control.
-
-import sys
-import build_mgmt
-
-if len(sys.argv) == 2:
-    buildname = sys.argv[1]
-else:
-    print 'Usage:'
-    print '  Accepts a single argument, the name of the release to tag in SVN'
-
-
-# Does the release exist in all platform directories?
-for platform in build_mgmt.platforms:
-
-if not build_mgmt.BuildExists(buildname):
-    print 'Build does not exist in all supported platform directories.'
-    print 'Use command line option "--force" to perform the tag anyway.'
-    sys.exit(1)