import inspect    # For entry/exit debug messages
import os         # For OS specific stuff, such as file size on disk
import sys
import time
import csv
from sys import platform
import glob
from optparse import OptionParser
import subprocess
import tarfile
import zipfile
import shutil
import re
import string

#################################################
#
# This script helps SAS 9.4 customers upgrade the Postgres level of the shipped
# Internal Data Server from 9.1 to 9.4.
#
# This script is an implementation of the manual instructions documented here:
#
# https://support.sas.com/en/documentation/third-party-software-reference/9-4/support-for-other-products.html
# http://support.sas.com/documentation/installcenter/en/ikpostgresmg/71374/PDF/default/upgrade_postgres.pdf
#
#################################################


#################################################
# Skip over comment lines and blank lines while reading in a CSV file
#################################################
class CSVFileCleaner:
    def __init__(self, file_to_wrap, comment_starts=('#', '//', '-- ')):
        self.wrapped_file = file_to_wrap
        self.comment_starts = comment_starts

    def next(self):
        line = self.wrapped_file.next()
        while line.strip().startswith(self.comment_starts) or len(line.strip()) == 0:
            line = self.wrapped_file.next()
        return line

    def __iter__(self):
        return self


#################################################
# Print to both the terminal and to a log file
#################################################
class Logger(object):
    def __init__(self, log_fqfn):
        self.log_fqfn = log_fqfn
        self.terminal = sys.stdout
        self.log = open(self.log_fqfn, "a")

    def write(self, message):
        self.terminal.write(message)
        self.log.write(message)

    def close(self):
        self.log.close()

    def flush(self):
        # This flush method is needed for Python 3 compatibility.
        # It handles the flush call by doing nothing.
        # You might want to specify some extra behavior here.
        pass
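
# Usage sketch for CSVFileCleaner (illustrative only; it mirrors how read_csv_file()
# below wraps the customer's CSV file, and 'servers.csv' is a hypothetical file name):
#
#   with open('servers.csv', 'rb') as f:
#       for row in csv.reader(CSVFileCleaner(f)):
#           print(row)    # comment lines and blank lines never reach csv.reader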
#################################################
# Debug printing
#################################################
def printd(msg):
    global debug_logging
    if debug_logging:
        print(msg)


#################################################
# Set up a logger, so that stdout goes to the terminal and to a log file
#################################################
def setup_logger(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    # Set up the logger quickly, so that any further printing goes to both the terminal and the log file
    my_name_fqfn = os.path.realpath(__file__)
    my_name_basename = os.path.basename(my_name_fqfn)
    my_name_core = my_name_basename.split('.')[0]
    my_name_fq_dir = os.path.dirname(my_name_fqfn)
    current_timestamp = time.strftime('%Y-%m-%d-%H.%M.%S', time.localtime())

    if configuration_dict['my_args'].log_fq_dir is None:
        log_fqfn = my_name_fq_dir + os.sep + my_name_core + '_' + current_timestamp + '.log'
    else:
        if os.path.isdir(configuration_dict['my_args'].log_fq_dir):
            log_fqfn = configuration_dict['my_args'].log_fq_dir + os.sep + my_name_core + '_' + current_timestamp + '.log'
        else:
            print('Failure: You specified a log file directory via the "-l" parameter, but this directory does not exist: ' + configuration_dict['my_args'].log_fq_dir)
            sys.exit(1)

    sys.stdout = Logger(log_fqfn)
    sys.stderr = sys.stdout

    configuration_dict['log_fqfn'] = log_fqfn
    configuration_dict['my_name_fq_dir'] = my_name_fq_dir

    printd('platform.lower() ' + platform.lower())
    printd('Exit {0}'.format(inspect.stack()[0][3]))
#################################################
# Parse args
#################################################
def parse_args(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    parser = OptionParser()
    parser.add_option('-c', '--csvfile', dest='csv_file',
                      help='CSV file that contains info about the SAS Data Servers to upgrade [required].')
    parser.add_option('-a', '--analyze', action='store_true', dest='analyze_mode',
                      help='Analyze (locate) all the SAS Data Servers to upgrade on this machine.')
    parser.add_option('-v', '--validate', action='store_true', dest='validate_mode',
                      help='Validate all info specified in the CSV file. No upgrade will be performed.')
    parser.add_option('-u', '--upgrade', action='store_true', dest='upgrade_mode',
                      help='Upgrade the SAS Data Servers on this machine.')
    parser.add_option('-l', '--log', dest='log_fq_dir',
                      help='FQ directory to store the log file. It must already exist. Optional. Default is the same location as this .py file.')
    parser.add_option('-d', '--debug', action='store_true', dest='debug_logging',
                      help='Turn debug on.')
    options, args = parser.parse_args()
    printd('options = ' + str(options))
    printd('args = ' + str(args))
    configuration_dict['my_args'] = options

    if options.debug_logging is not None:
        global debug_logging
        debug_logging = options.debug_logging

    # Validate arguments
    if options.csv_file is None or len(options.csv_file) == 0:
        print('Failure: Option "-c" is required, which specifies the CSV file that contains info about the SAS Data Servers to upgrade.')
        sys.exit(1)

    if options.analyze_mode is None and options.validate_mode is None and options.upgrade_mode is None:
        print('Failure: Either option "-a" or "-v" or "-u" is required, which puts this script in Analyze or Validate or Upgrade mode.')
        sys.exit(1)

    if options.analyze_mode is True and options.validate_mode is True:
        print('Failure: Both options "-a" and "-v" were specified. Only one is allowed.')
        sys.exit(1)
    elif options.analyze_mode is True and options.upgrade_mode is True:
        print('Failure: Both options "-a" and "-u" were specified. Only one is allowed.')
        sys.exit(1)
    elif options.validate_mode is True and options.upgrade_mode is True:
        print('Failure: Both options "-v" and "-u" were specified. Only one is allowed.')
        sys.exit(1)

    printd('')
    printd('csv_file = ' + str(options.csv_file))
    printd('log_fq_dir = ' + str(options.log_fq_dir))
    printd('debug_logging = ' + str(options.debug_logging))
    printd('Exit {0}'.format(inspect.stack()[0][3]))
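
# Illustrative invocation and CSV layout (a sketch only -- the script/file names, paths,
# port, user id, and password below are hypothetical placeholders, not shipped values):
#
#   python upgrade_postgres.py -c servers.csv -a     # analyze: report Data Servers and their versions
#   python upgrade_postgres.py -c servers.csv -v     # validate: test the CSV entries, no upgrade
#   python upgrade_postgres.py -c servers.csv -u     # upgrade: perform the 9.1 -> 9.4 upgrade
#
# The CSV file parsed by read_csv_file() below is expected to look like this:
#
#   # Lines starting with '#', '//' or '-- ' and blank lines are ignored
#   /install/SASHome                         <-- row 1: SASHome directory
#   /install/SASConfig/Lev1                  <-- row 2: SASConfig/LevX directory
#   /install/postgres94.tar.gz               <-- row 3: new Postgres binaries archive (.zip on Windows)
#   WebInfrastructurePlatformDataServer,9432,dbmsowner,secret   <-- one row per Data Server:
#                                                directory under LevX, port, super user, password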
#################################################
# Read in a CSV file that contains info about the SAS Data Servers to upgrade
#################################################
def read_csv_file(csv_file, configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))
    printd('csv_file "' + csv_file + '"')

    data_servers_dict = {}

    if os.path.isfile(csv_file):
        with open(csv_file, 'rb') as csvfile:
            csv_rows = csv.reader(CSVFileCleaner(csvfile))
            row_counter = 1
            for row in csv_rows:
                printd('row ' + str(row_counter) + ': "' + str(row) + '"')
                if row is not None and len(row) > 0:
                    if row_counter == 1:
                        configuration_dict['sas_home_fq_dir'] = row[0]
                        printd('sas_home_fq_dir ' + configuration_dict['sas_home_fq_dir'])
                    elif row_counter == 2:
                        configuration_dict['sas_config_fq_dir'] = row[0]
                        printd('sas_config_fq_dir ' + configuration_dict['sas_config_fq_dir'])
                    elif row_counter == 3:
                        configuration_dict['postgres_binaries_fqfn'] = row[0]
                        printd('postgres_binaries_fqfn ' + configuration_dict['postgres_binaries_fqfn'])
                    # The remaining rows are one-per-data-server
                    else:
                        col_counter = 1
                        data_server_info_dict = {}
                        for col in row:
                            # If we have some sort of real value for this column
                            if col is not None and len(col) > 0:
                                # Data Server directory name underneath SASConfig/LevX
                                if col_counter == 1:
                                    # In case the customer accidentally put a slash around the directory name, kindly remove it
                                    data_server_info_dict['ds_dir'] = col.strip(os.sep)
                                # Port number
                                elif col_counter == 2:
                                    data_server_info_dict['ds_port'] = col
                                # Super user
                                elif col_counter == 3:
                                    data_server_info_dict['ds_userid'] = col
                                # Password
                                elif col_counter == 4:
                                    data_server_info_dict['ds_password'] = col
                                # Extra stuff, just log it quietly
                                else:
                                    print('Found an extraneous column ' + str(col_counter) + ' with value "' + col + '" for row ' + str(row_counter) + ' in CSV file ' + csv_file + '. Ignoring and continuing.')
                            else:
                                print('Found a null or zero length string in column ' + str(col_counter) + ' for row ' + str(row_counter) + ' in CSV file ' + csv_file + '. Exiting...')
                                sys.exit(1)
                            col_counter += 1
                        printd('data_server_info_dict ' + str(data_server_info_dict))
                        data_servers_dict[data_server_info_dict['ds_dir']] = data_server_info_dict
                row_counter += 1
        configuration_dict['data_servers_dict'] = data_servers_dict
    else:
        print('Failure: CSV file "' + csv_file + '" cannot be accessed. Exiting.')
        sys.exit(1)

    printd('configuration_dict ' + str(configuration_dict))
    printd('Exit {0}'.format(inspect.stack()[0][3]))


#################################################
# Validate the customer's CSV file data
#################################################
def validate_csv_info(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    validate_simple_key_in_configuration_dict('sas_home_fq_dir', 'fail', configuration_dict)
    validate_simple_key_in_configuration_dict('sas_config_fq_dir', 'fail', configuration_dict)
    validate_simple_key_in_configuration_dict('postgres_binaries_fqfn', 'fail', configuration_dict)

    if not os.path.isdir(configuration_dict['sas_home_fq_dir']):
        print('Failure: Cannot access directory "' + configuration_dict['sas_home_fq_dir'] + '"')
        sys.exit(1)

    if not os.path.isdir(configuration_dict['sas_config_fq_dir']):
        print('Failure: Cannot access directory "' + configuration_dict['sas_config_fq_dir'] + '"')
        sys.exit(1)

    if not os.path.isfile(configuration_dict['postgres_binaries_fqfn']):
        print('Failure: Cannot access file "' + configuration_dict['postgres_binaries_fqfn'] + '"')
        sys.exit(1)

    sashome_wip_fq_dir = configuration_dict['sas_home_fq_dir'] + os.sep + 'SASWebInfrastructurePlatformDataServer' + os.sep + '9.4'
    if not os.path.isdir(sashome_wip_fq_dir):
        print('Failure: Cannot access directory "' + sashome_wip_fq_dir + '"')
        sys.exit(1)
    else:
        configuration_dict['sashome_wip_fq_dir'] = sashome_wip_fq_dir

    # No need to do any validation of Data Server info if we are in analyze mode
    if not configuration_dict['my_args'].analyze_mode:
        if 'data_servers_dict' not in configuration_dict or len(configuration_dict['data_servers_dict']) == 0:
            print('Failure: No Data Server rows were found in CSV file "' + configuration_dict['my_args'].csv_file + '"')
            sys.exit(1)

        data_servers_dict = configuration_dict['data_servers_dict']
        for data_server in data_servers_dict:
            data_server_info_dict = data_servers_dict[data_server]
            data_server_fq_dir = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir']
            if not os.path.isdir(data_server_fq_dir):
                print('Failure: Cannot access directory "' + data_server_fq_dir + '"')
                sys.exit(1)
            else:
                data_server_info_dict['data_server_fq_dir'] = data_server_fq_dir

    printd('Exit {0}'.format(inspect.stack()[0][3]))


#################################################
# Make sure we got an expected key that came from the input CSV file, and that it isn't a zero length string
#################################################
def validate_simple_key_in_configuration_dict(dict_key, fail_or_warn, configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    if fail_or_warn == 'fail':
        fail_or_warn_prefix = 'Failure'
    else:
        fail_or_warn_prefix = 'Warning'

    if dict_key not in configuration_dict:
        print(fail_or_warn_prefix + ': Missing key "' + dict_key + '" that originates from CSV file ' + configuration_dict['my_args'].csv_file)
        if fail_or_warn == 'fail':
            sys.exit(1)
    elif configuration_dict[dict_key] is None or len(configuration_dict[dict_key]) == 0:
        print(fail_or_warn_prefix + ': Key "' + dict_key + '" is a zero length string that originates from CSV file ' + configuration_dict['my_args'].csv_file)
        if fail_or_warn == 'fail':
            sys.exit(1)
    else:
        printd(dict_key + ': ' + configuration_dict[dict_key])

    printd('Exit {0}'.format(inspect.stack()[0][3]))
#################################################
# Analyze mode
#################################################
def analyze_data_servers(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    # ---------------------------------------------------------------------
    # Analyze the Postgres binaries
    # ---------------------------------------------------------------------
    sashome_wip_bin_fq_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'bin'
    cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '--version']
    print('')
    print('The Postgres binaries are at level (' + ' '.join(cmd_str) + ')')
    result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
    print(result.replace('\r', '').rstrip('\n'))

    # ---------------------------------------------------------------------
    # Analyze the Data Servers
    # ---------------------------------------------------------------------
    pg_version_search_pattern = configuration_dict['sas_config_fq_dir'] + os.sep + '*' + os.sep + 'data' + os.sep + 'PG_VERSION'
    printd('pg_version_search_pattern ' + pg_version_search_pattern)
    pg_version_FQFN_list = glob.glob(pg_version_search_pattern)
    printd('pg_version_FQFN_list: ' + str(pg_version_FQFN_list))
    pg_version_FQFN_list.sort()

    pg_91_list = []
    pg_other_list = []
    if len(pg_version_FQFN_list) > 0:
        for pg_version_FQFN in pg_version_FQFN_list:
            pg_FQ_dir = os.path.dirname(os.path.dirname(pg_version_FQFN))

            # Get the port number from the Data Server start script
            ds_dir = os.path.basename(pg_FQ_dir)
            start_script_fqfn = find_one_data_server_start_script(ds_dir, configuration_dict)
            regexp = re.compile(r'.*' + ds_dir + r'[\\/]data.* -p ([0-9]*).*')
            ds_port = '?'
            with open(start_script_fqfn) as f:
                for line in f:
                    match = regexp.match(line)
                    if match:
                        ds_port = match.group(1)
                        printd('ds_port "' + ds_port + '"')
                        break

            # Get the Postgres version from the PG_VERSION file
            pg_version_str = open(pg_version_FQFN).read()
            if '9.1' in pg_version_str:
                printd('Found 9.1 in file ' + pg_version_FQFN)
                pg_91_dict = {'pg_version': pg_version_str.strip(string.whitespace + '\\l'), 'pg_FQ_dir': pg_FQ_dir, 'ds_port': ds_port}
                pg_91_list.append(pg_91_dict)
            else:
                printd('Found another pg version "' + pg_version_str + '" in ' + pg_FQ_dir)
                pg_other_dict = {'pg_version': pg_version_str.strip(string.whitespace + '\\l'), 'pg_FQ_dir': pg_FQ_dir, 'ds_port': ds_port}
                pg_other_list.append(pg_other_dict)

        # Print out a nice report
        if len(pg_91_list) > 0:
            print('')
            print('Data Servers found at Postgres version 9.1 with port number (by examining file PG_VERSION and the Data Server start script)')
            max_len_ds_port = 0
            max_len_pg_version = 0
            for pg_91_dict in pg_91_list:
                if len(pg_91_dict['ds_port']) > max_len_ds_port:
                    max_len_ds_port = len(pg_91_dict['ds_port'])
                if len(pg_91_dict['pg_version']) > max_len_pg_version:
                    max_len_pg_version = len(pg_91_dict['pg_version'])
            format_str = '{0:' + str(max_len_pg_version) + '} {1:' + str(max_len_ds_port) + '} {2}'
            for pg_91_dict in pg_91_list:
                print(format_str.format(pg_91_dict['pg_version'], pg_91_dict['ds_port'], pg_91_dict['pg_FQ_dir']))

        if len(pg_other_list) > 0:
            print('')
            print('Data Servers and their Postgres version and port number (by examining file PG_VERSION and the Data Server start script)')
            max_len_ds_port = 0
            max_len_pg_version = 0
            for pg_other_dict in pg_other_list:
                if len(pg_other_dict['ds_port']) > max_len_ds_port:
                    max_len_ds_port = len(pg_other_dict['ds_port'])
                if len(pg_other_dict['pg_version']) > max_len_pg_version:
                    max_len_pg_version = len(pg_other_dict['pg_version'])
            format_str = '{0:' + str(max_len_pg_version) + '} {1:' + str(max_len_ds_port) + '} {2}'
            for pg_other_dict in pg_other_list:
                print(format_str.format(pg_other_dict['pg_version'], pg_other_dict['ds_port'], pg_other_dict['pg_FQ_dir']))
    else:
        print('')
        print('No Data Servers were found in directory ' + configuration_dict['sas_config_fq_dir'])

    printd('Exit {0}'.format(inspect.stack()[0][3]))


#################################################
# Validate mode. Validate the details of each Data Server in the CSV file.
# Basic CSV checking has already been performed.
#################################################
def validate_data_servers(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    # Verify that the Postgres binaries are at Postgres 9.1
    verify_postgres_binaries_at_91(configuration_dict)

    # Locate each Data Server's start/stop script
    find_data_servers_start_script(configuration_dict)

    data_servers_dict = configuration_dict['data_servers_dict']
    sashome_wip_bin_fq_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'bin'

    print('')
    print('Validate the port and credentials of each Data Server in the CSV file by 1) starting it up, and 2) running an SQL command to display the Postgres version.')

    # ---------------------------------------------------------------------
    # Loop through each of the Data Servers in the CSV file
    # ---------------------------------------------------------------------
    for data_server in data_servers_dict:
        data_server_info_dict = data_servers_dict[data_server]

        # Environment variables that are needed by the Postgres executables
        env_dict = dict(os.environ)
        pg_env_dict = {'LD_LIBRARY_PATH': configuration_dict['sashome_wip_fq_dir'] + os.sep + 'lib',
                       'LIBPATH':         configuration_dict['sashome_wip_fq_dir'] + os.sep + 'lib',
                       'PGUSER':          data_server_info_dict['ds_userid'],
                       'PGPASSWORD':      data_server_info_dict['ds_password'],
                       'PGHOST':          'localhost',
                       'PGPORT':          data_server_info_dict['ds_port']
                       }
        env_dict.update(pg_env_dict)
        printd('env_dict: ' + str(env_dict))

        # ---------------------------------------------------------------------
        # Make sure the Data Server is started
        # ---------------------------------------------------------------------
        cmd_str = [data_server_info_dict['start_script_fqfn'], 'start']
        print('')
        print('Start Data Server ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        print(result.replace('\r', '').rstrip('\n'))
        time.sleep(5)

        # ---------------------------------------------------------------------
        # Display the old Postgres version
        # ---------------------------------------------------------------------
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '-d', 'postgres', '-c', 'select version()']
        print('')
        print('Display Data Server version ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        print(result.replace('\r', '').rstrip('\n'))

    # ---------------------------------------------------------------------
    # Verify that all Data Servers in SASConfig/LevX are in the CSV file
    # ---------------------------------------------------------------------
    verify_all_data_servers_are_in_csv(configuration_dict)

    printd('Exit {0}'.format(inspect.stack()[0][3]))
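
# For reference, the per-server check above is roughly equivalent to running psql by hand
# with the same environment (a sketch; <SASHome>, <port>, <user>, and <password> are
# placeholders for the values taken from the CSV file):
#
#   LD_LIBRARY_PATH=<SASHome>/SASWebInfrastructurePlatformDataServer/9.4/lib \
#   PGUSER=<user> PGPASSWORD=<password> PGHOST=localhost PGPORT=<port> \
#   <SASHome>/SASWebInfrastructurePlatformDataServer/9.4/bin/psql -d postgres -c 'select version()'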
#################################################
# Upgrade mode
#################################################
def upgrade_data_servers(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    # Verify that the Postgres binaries are at Postgres 9.1
    verify_postgres_binaries_at_91(configuration_dict)

    # Verify that all requested Data Servers are really at Postgres 9.1
    verify_data_servers_at_91(configuration_dict)

    # Verify that all Data Servers in SASConfig/LevX are in the CSV file
    verify_all_data_servers_are_in_csv(configuration_dict)

    # Locate each Data Server's start/stop script
    find_data_servers_start_script(configuration_dict)

    data_servers_dict = configuration_dict['data_servers_dict']
    sashome_wip_bin_fq_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'bin'

    # ---------------------------------------------------------------------
    # Loop through each of the Data Servers
    # ---------------------------------------------------------------------
    for data_server in data_servers_dict:
        data_server_info_dict = data_servers_dict[data_server]

        # Environment variables that are needed by the Postgres executables
        env_dict = dict(os.environ)
        pg_env_dict = {'LD_LIBRARY_PATH': configuration_dict['sashome_wip_fq_dir'] + os.sep + 'lib',
                       'LIBPATH':         configuration_dict['sashome_wip_fq_dir'] + os.sep + 'lib',
                       'PGUSER':          data_server_info_dict['ds_userid'],
                       'PGPASSWORD':      data_server_info_dict['ds_password'],
                       'PGHOST':          'localhost',
                       'PGPORT':          data_server_info_dict['ds_port']
                       }
        env_dict.update(pg_env_dict)
        printd('env_dict: ' + str(env_dict))

        # ---------------------------------------------------------------------
        # 2. Make sure the Data Server is started
        # ---------------------------------------------------------------------
        cmd_str = [data_server_info_dict['start_script_fqfn'], 'start']
        print('')
        print('Start Data Server ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        # Remove all carriage returns, as there seem to be duplicates on Windows, causing extra lines in the log. Only line feeds are needed.
        print(result.replace('\r', '').rstrip('\n'))
        time.sleep(5)

        # ---------------------------------------------------------------------
        # Display the old Postgres version
        # ---------------------------------------------------------------------
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '-d', 'postgres', '-c', 'select version()']
        print('')
        print('Display Data Server version ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        print(result.replace('\r', '').rstrip('\n'))

        # ---------------------------------------------------------------------
        # Display existing databases within this Data Server
        # ---------------------------------------------------------------------
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '-d', 'postgres', '-c', '\\list']
        print('')
        print('List the databases in Data Server ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        print(result.replace('\r', '').rstrip('\n'))

        # ---------------------------------------------------------------------
        # Determine an unused userid to initialize the database with
        # ---------------------------------------------------------------------
        found_unused_userid = False
        for x in range(0, 1000):
            ds_init_userid = 'updateUser' + str(x)
            cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '-d', 'postgres', '-tAc', "SELECT 1 FROM pg_roles WHERE rolname='" + ds_init_userid + "'"]
            printd('Run cmd: ' + ' '.join(cmd_str))
            result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
            printd('result: "' + result.strip() + '"')
            # If the userid exists, keep looping until we find one that doesn't exist
            if '1' not in result:
                found_unused_userid = True
                data_server_info_dict['ds_init_userid'] = ds_init_userid
                printd('Available userid to perform initdb with: "' + ds_init_userid + '"')
                break
        if found_unused_userid is False:
            print('Failure: Could not find an unused userid to initialize the new 9.4 data server with. Exiting...')
            sys.exit(1)

        # ---------------------------------------------------------------------
        # 3. Dump out the Data Server contents
        # ---------------------------------------------------------------------
        dump_fqfn = data_server_info_dict['data_server_fq_dir'] + os.sep + 'db_update.dat'
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'pg_dumpall', '-f', dump_fqfn]
        print('')
        print('Dump out Data Server contents for ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        result_stripped = result.replace('\r', '').rstrip('\n')
        if len(result_stripped) > 0:
            print(result_stripped)
            raw_input('Press enter to continue, or ctrl-c to exit')
        if not os.path.isfile(dump_fqfn):
            print('Failure: Dump file ' + dump_fqfn + ' was not created. Exiting...')
            sys.exit(1)
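
        # Note on the dump just taken: pg_dumpall writes a single plain-SQL file
        # (db_update.dat here) containing every database in this Data Server plus the
        # roles and other global objects; it is replayed later with "psql -f" against
        # the freshly initialized 9.4 cluster, connecting as the unused "updateUserX"
        # role found above.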
        # ---------------------------------------------------------------------
        # 4. Stop the Data Server
        # ---------------------------------------------------------------------
        cmd_str = [data_server_info_dict['start_script_fqfn'], 'stop']
        print('')
        print('Stop Data Server ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        print(result.replace('\r', '').rstrip('\n'))
        time.sleep(5)

        # ---------------------------------------------------------------------
        # 5. Backup the 9.1 data store
        # ---------------------------------------------------------------------
        src_dir = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data'
        dst_dir = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data_91x'
        rename_file_dir(src_dir, dst_dir, 'directory')

    # ---------------------------------------------------------------------
    # Make sure all Data Servers have stopped
    # ---------------------------------------------------------------------
    if 'win' in platform.lower():
        cmd_str = 'tasklist /FI "IMAGENAME eq postgres.exe"'
    else:
        cmd_str = 'ps -ef | grep -i postgres | grep -v " grep "'
    print('')
    print('Ensure all Data Servers have stopped (' + cmd_str + ')')
    result = subprocess.Popen(cmd_str, shell=True, cwd=configuration_dict['my_name_fq_dir'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
    result_stripped = result.replace('\r', '').rstrip(' \n')
    if 'win' in platform.lower():
        if 'postgres.exe' in result_stripped:
            print('WARNING: Analyze the following output to ensure that no Data Servers are still running.')
            print(result_stripped)
            raw_input('Press enter to continue, or ctrl-c to exit')
        else:
            print('Verified that no Postgres processes are running.')
    else:
        if len(result_stripped) > 0:
            print('WARNING: Analyze the following output to ensure that no Data Servers are still running.')
            print(result_stripped)
            raw_input('Press enter to continue, or ctrl-c to exit')
        else:
            print('Verified that no Postgres processes are running.')

    # ---------------------------------------------------------------------
    # 6. Back up Postgres in SASHome
    # ---------------------------------------------------------------------
    src_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'bin'
    dst_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'bin_91x'
    rename_file_dir(src_dir, dst_dir, 'directory')

    src_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'lib'
    dst_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'lib_91x'
    rename_file_dir(src_dir, dst_dir, 'directory')

    src_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'share'
    dst_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'share_91x'
    rename_file_dir(src_dir, dst_dir, 'directory')

    src_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'include'
    dst_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'include_91x'
    rename_file_dir(src_dir, dst_dir, 'directory')
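
    # At this point the original 9.1 installation is preserved alongside the new one:
    # each Data Server's old cluster lives on as <DataServer>/data_91x, and the 9.1
    # binaries as bin_91x / lib_91x / share_91x / include_91x under
    # SASHome/SASWebInfrastructurePlatformDataServer/9.4.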
    # ---------------------------------------------------------------------
    # 6. Untar/Unzip the new Postgres in SASHome
    # ---------------------------------------------------------------------
    print('')
    if 'win' in platform.lower():
        print('Unzip the new Postgres binaries ' + configuration_dict['postgres_binaries_fqfn'] + ' into ' + configuration_dict['sashome_wip_fq_dir'])
        zip_ref = zipfile.ZipFile(configuration_dict['postgres_binaries_fqfn'], 'r')
        zip_ref.extractall(configuration_dict['sashome_wip_fq_dir'])
        zip_ref.close()
    else:
        print('Untar the new Postgres binaries ' + configuration_dict['postgres_binaries_fqfn'] + ' into ' + configuration_dict['sashome_wip_fq_dir'])
        tar = tarfile.open(configuration_dict['postgres_binaries_fqfn'])
        tar.extractall(path=configuration_dict['sashome_wip_fq_dir'])
        tar.close()

    # ---------------------------------------------------------------------
    # Loop through each of the Data Servers
    # ---------------------------------------------------------------------
    for data_server in data_servers_dict:
        data_server_info_dict = data_servers_dict[data_server]

        # Environment variables that are needed by the Postgres executables
        env_dict = dict(os.environ)
        pg_env_dict = {'LD_LIBRARY_PATH': configuration_dict['sashome_wip_fq_dir'] + os.sep + 'lib',
                       'LIBPATH':         configuration_dict['sashome_wip_fq_dir'] + os.sep + 'lib',
                       'PGUSER':          data_server_info_dict['ds_userid'],
                       'PGPASSWORD':      data_server_info_dict['ds_password'],
                       'PGHOST':          'localhost',
                       'PGPORT':          data_server_info_dict['ds_port']
                       }
        env_dict.update(pg_env_dict)
        printd('env_dict: ' + str(env_dict))

        # ---------------------------------------------------------------------
        # 7. Initialize the new postgres
        # ---------------------------------------------------------------------
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'initdb',
                   '-D', data_server_info_dict['data_server_fq_dir'] + os.sep + 'data',
                   '-U', data_server_info_dict['ds_init_userid'],
                   '-A', 'trust',
                   '-E', 'UTF8',
                   '--locale=C'
                   ]
        print('')
        print('Initialize the new Data Server for ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        result_stripped = result.replace('\r', '').rstrip('\n')
        if len(result_stripped) > 0:
            print(result_stripped)
        if 'Success. You can now start the database server using' not in result_stripped:
            print('')
            print('FAILURE: Did not find the success string in the above result.')
            raw_input('Press enter to continue, or ctrl-c to exit')
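
        # Note on the initdb flags above: "-A trust" means local connections are accepted
        # without a password, which is what lets the restore below connect as the brand-new
        # "updateUserX" role before any passwords exist. The original pg_hba.conf is copied
        # back in step 12, and the temporary role is set to NOLOGIN once the restore is done.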
        # ---------------------------------------------------------------------
        # 8. Update the configuration files
        # ---------------------------------------------------------------------
        src_file = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data' + os.sep + 'postgresql.conf'
        dst_file = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data' + os.sep + 'postgresql.conf_94'
        rename_file_dir(src_file, dst_file, 'file')

        src_file = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data_91x' + os.sep + 'postgresql.conf'
        dst_dir = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data'
        copy_file(src_file, dst_dir)

        # Append to the end of the file, for Linux only
        if 'win' not in platform.lower():
            pg_conf_file = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data' + os.sep + 'postgresql.conf'
            with open(pg_conf_file, 'a') as f:
                # Lead with a newline so the setting always lands on its own line
                f.write('\ndynamic_shared_memory_type = sysv\n')

        # ---------------------------------------------------------------------
        # 9. Start the new Data Server
        # ---------------------------------------------------------------------
        cmd_str = [data_server_info_dict['start_script_fqfn'], 'start']
        print('')
        print('Start Data Server ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        print(result.replace('\r', '').rstrip('\n'))
        time.sleep(5)

        # ---------------------------------------------------------------------
        # 10. Restore the Data Server contents
        # ---------------------------------------------------------------------
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '-d', 'postgres',
                   '-U', data_server_info_dict['ds_init_userid'],
                   '--set', 'ON_ERROR_STOP=1',
                   '-f', data_server_info_dict['data_server_fq_dir'] + os.sep + 'db_update.dat']
        print('')
        print('Restore the Data Server contents for ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        result_stripped = result.replace('\r', '').rstrip('\n')
        if len(result_stripped) > 0:
            print(result_stripped)
        if 'ERROR:' in result_stripped:
            print('')
            print('FAILURE: Found "ERROR:" in the above result.')
            raw_input('Press enter to continue, or ctrl-c to exit')

        # ---------------------------------------------------------------------
        # Reassign ownership of tables to the original DBMS super admin
        # ---------------------------------------------------------------------
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '-d', 'postgres', '-c',
                   'ALTER DATABASE postgres OWNER TO "' + data_server_info_dict['ds_userid'] + '"; ' +
                   'ALTER DATABASE template0 OWNER TO "' + data_server_info_dict['ds_userid'] + '"; ' +
                   'ALTER DATABASE template1 OWNER TO "' + data_server_info_dict['ds_userid'] + '"']
        print('')
        print('Reassign ownership of tables to original owner (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        result_stripped = result.replace('\r', '').rstrip('\n')
        if len(result_stripped) > 0:
            print(result_stripped)
        if 'ERROR:' in result_stripped:
            print('')
            print('FAILURE: Found "ERROR:" in the above result.')
            raw_input('Press enter to continue, or ctrl-c to exit')
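
        # The three ALTER DATABASE statements above are there because initdb created the
        # postgres / template0 / template1 databases owned by the temporary "updateUserX"
        # role, and the dump restore does not necessarily reset them; this hands ownership
        # back to the Data Server's original super user from the CSV file.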
        # ---------------------------------------------------------------------
        # 10. Disable the login for updateUserX
        # ---------------------------------------------------------------------
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '-d', 'postgres', '-c',
                   'alter role "' + data_server_info_dict['ds_init_userid'] + '" NOLOGIN;']
        print('')
        print('Disable the login for ' + data_server_info_dict['ds_init_userid'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        result_stripped = result.replace('\r', '').rstrip('\n')
        if len(result_stripped) > 0:
            print(result_stripped)
        if 'ERROR:' in result_stripped:
            print('')
            print('FAILURE: Found "ERROR:" in the above result.')
            raw_input('Press enter to continue, or ctrl-c to exit')

        # ---------------------------------------------------------------------
        # Display the new Postgres version
        # ---------------------------------------------------------------------
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '-d', 'postgres', '-c', 'select version()']
        print('')
        print('Verify Data Server version ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        print(result.replace('\r', '').rstrip('\n'))

        # ---------------------------------------------------------------------
        # Display existing databases within this Data Server
        # ---------------------------------------------------------------------
        cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '-d', 'postgres', '-c', '\\list']
        print('')
        print('List the databases in Data Server ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], env=env_dict, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        print(result.replace('\r', '').rstrip('\n'))

        # ---------------------------------------------------------------------
        # 11. Stop the Data Server
        # ---------------------------------------------------------------------
        cmd_str = [data_server_info_dict['start_script_fqfn'], 'stop']
        print('')
        print('Stop Data Server ' + data_server_info_dict['ds_dir'] + ' (' + ' '.join(cmd_str) + ')')
        result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
        print(result.replace('\r', '').rstrip('\n'))
        time.sleep(5)

        # ---------------------------------------------------------------------
        # 12. Update more Postgres configuration files
        # ---------------------------------------------------------------------
        src_file = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data' + os.sep + 'pg_hba.conf'
        dst_file = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data' + os.sep + 'pg_hba.conf_94'
        rename_file_dir(src_file, dst_file, 'file')

        src_file = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data_91x' + os.sep + 'pg_hba.conf'
        dst_dir = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data'
        copy_file(src_file, dst_dir)
    # ---------------------------------------------------------------------
    # 13. Rename SASEnvironmentManager directory
    # ---------------------------------------------------------------------
    src_dir = configuration_dict['sas_config_fq_dir'] + os.sep + 'Web' + os.sep + 'SASEnvironmentManager' + os.sep + 'agent-5.8.0-EE' + os.sep + 'data'
    dst_dir = configuration_dict['sas_config_fq_dir'] + os.sep + 'Web' + os.sep + 'SASEnvironmentManager' + os.sep + 'agent-5.8.0-EE' + os.sep + 'data_archive'
    rename_file_dir(src_dir, dst_dir, 'directory')

    # ---------------------------------------------------------------------
    # Verify that no Data Servers are still at 9.1, by simply re-running the analyze mode logic
    # ---------------------------------------------------------------------
    print('')
    print('Analyze all Data Servers in ' + configuration_dict['sas_config_fq_dir'] + ' and report their Postgres version')
    analyze_data_servers(configuration_dict)

    printd('Exit {0}'.format(inspect.stack()[0][3]))


#################################################
# Verify that the Postgres binaries are currently at 9.1
#################################################
def verify_postgres_binaries_at_91(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    sashome_wip_bin_fq_dir = configuration_dict['sashome_wip_fq_dir'] + os.sep + 'bin'
    cmd_str = [sashome_wip_bin_fq_dir + os.sep + 'psql', '--version']
    result = subprocess.Popen(cmd_str, cwd=configuration_dict['my_name_fq_dir'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()[0]
    if 'psql (PostgreSQL) 9.1' in result:
        printd('The Postgres binaries in SASHome are correctly at 9.1')
    else:
        print('Failure: The Postgres binaries in SASHome are not at the 9.1 level. Has the upgrade already happened?')
        print('Failure: The Postgres binaries are at level "' + result.rstrip('\n\r') + '" (' + ' '.join(cmd_str) + ')')
        print('Failure: Exiting...')
        sys.exit(1)

    printd('Exit {0}'.format(inspect.stack()[0][3]))
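
# For reference, "psql --version" on a 9.1 installation prints a line like
# "psql (PostgreSQL) 9.1.9" (the patch level varies), which is why the check above only
# looks for the "psql (PostgreSQL) 9.1" prefix. The PG_VERSION file examined in the next
# function contains just the major version string, e.g. "9.1".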
#################################################
# Verify Data Servers to be upgraded are currently at 9.1
#################################################
def verify_data_servers_at_91(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    data_servers_dict = configuration_dict['data_servers_dict']
    for data_server in data_servers_dict:
        data_server_info_dict = data_servers_dict[data_server]
        pg_version_FQFN = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir'] + os.sep + 'data' + os.sep + 'PG_VERSION'
        if os.path.isfile(pg_version_FQFN):
            pg_version_str = open(pg_version_FQFN).read()
            if '9.1' in pg_version_str:
                printd('Verified that data server is at 9.1 ' + pg_version_FQFN)
            else:
                print('Failure: A Data Server was specified to be upgraded, but is not at Postgres 9.1.')
                print('Failure: It is at version "' + pg_version_str.strip() + '" (' + pg_version_FQFN + ') Exiting...')
                sys.exit(1)
        else:
            print('Failure: Could not find file ' + pg_version_FQFN + ' to analyze the version of Postgres. Exiting...')
            sys.exit(1)

    printd('Exit {0}'.format(inspect.stack()[0][3]))


#################################################
# Verify that all Data Servers in SASConfig/LevX are in the CSV file
#################################################
def verify_all_data_servers_are_in_csv(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    # ---------------------------------------------------------------------
    # Cross check the list of Data Servers in the CSV file with the actual Data Servers in SASConfig/LevX
    # ---------------------------------------------------------------------
    pg_version_search_pattern = configuration_dict['sas_config_fq_dir'] + os.sep + '*' + os.sep + 'data' + os.sep + 'PG_VERSION'
    printd('pg_version_search_pattern ' + pg_version_search_pattern)
    pg_version_FQFN_list = glob.glob(pg_version_search_pattern)
    printd('pg_version_FQFN_list: ' + str(pg_version_FQFN_list))
    pg_version_FQFN_list.sort()

    if len(pg_version_FQFN_list) > 0:
        problem_found = False
        print('')
        print('Cross check each Data Server in the CSV file with all Data Servers found on the file system at ' + configuration_dict['sas_config_fq_dir'])
        print('')
        for pg_version_FQFN in pg_version_FQFN_list:
            pg_version_str = open(pg_version_FQFN).read()
            pg_FQ_dir = os.path.dirname(os.path.dirname(pg_version_FQFN))
            if '9.1' in pg_version_str:
                printd('Found 9.1 in file ' + pg_version_FQFN)
                # Make sure this Data Server is in the CSV file
                data_servers_dict = configuration_dict['data_servers_dict']
                match_found = False
                for data_server in data_servers_dict:
                    data_server_info_dict = data_servers_dict[data_server]
                    data_server_fq_dir = configuration_dict['sas_config_fq_dir'] + os.sep + data_server_info_dict['ds_dir']
                    if pg_FQ_dir == data_server_fq_dir:
                        printd('Found a match between the CSV file of Data Servers and SASConfig/LevX ' + pg_FQ_dir)
                        match_found = True
                        break
                if match_found is False:
                    problem_found = True
                    print('Failure: Found Data Server ' + pg_FQ_dir + ' that is NOT in the CSV file.')
                    print('         All Data Servers have to be upgraded at the same time.')
            else:
                problem_found = True
                print('Failure: Found a Data Server under SASConfig/LevX that is not at version 9.1.')
                print('         ' + pg_FQ_dir + ' is at version ' + pg_version_str.rstrip('\n\r'))
                print('         This script can only be used to upgrade Postgres version 9.1.')

        if problem_found is True:
            sys.exit(1)
        else:
            print('All Data Servers in ' + configuration_dict['sas_config_fq_dir'] + ' are accounted for in the CSV file.')
    else:
        print('Failure: Did not find any Data Servers under SASConfig/LevX. Is this the correct directory: ' + configuration_dict['sas_config_fq_dir'])
        sys.exit(1)

    printd('Exit {0}'.format(inspect.stack()[0][3]))
#################################################
# Find all Data Server start/stop scripts
#################################################
def find_data_servers_start_script(configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    data_servers_dict = configuration_dict['data_servers_dict']
    for data_server in data_servers_dict:
        data_server_info_dict = data_servers_dict[data_server]
        start_script_fqfn = find_one_data_server_start_script(data_server_info_dict['ds_dir'], configuration_dict)
        data_server_info_dict['start_script_fqfn'] = start_script_fqfn
        printd('start_script_fqfn ' + start_script_fqfn)

    printd('Exit {0}'.format(inspect.stack()[0][3]))


#################################################
# Find this one Data Server's start script
#################################################
def find_one_data_server_start_script(ds_dir, configuration_dict):
    printd('Entry {0}'.format(inspect.stack()[0][3]))
    printd('ds_dir ' + ds_dir)

    data_server_fq_dir = configuration_dict['sas_config_fq_dir'] + os.sep + ds_dir
    start_script_search_pattern = data_server_fq_dir + os.sep
    if 'win' in platform.lower():
        script_suffix = 'bat'
    else:
        script_suffix = 'sh'
    start_script_search_pattern = start_script_search_pattern + '*.' + script_suffix
    start_script_fqfn_list = glob.glob(start_script_search_pattern)
    printd('start_script_fqfn_list ' + str(start_script_fqfn_list))

    if len(start_script_fqfn_list) > 1:
        if ds_dir == 'WebInfrastructurePlatformDataServer':
            start_script_fqfn = configuration_dict['sas_config_fq_dir'] + os.sep + ds_dir + os.sep + 'webinfdsvrc.' + script_suffix
        else:
            print('Unexpected: Found too many potential start/stop scripts for the Data Server in ' + configuration_dict['sas_config_fq_dir'] + os.sep + ds_dir + ' Exiting.')
            sys.exit(1)
    elif len(start_script_fqfn_list) == 1:
        start_script_fqfn = start_script_fqfn_list[0]
    else:
        print('Failure: Did not find a start script for Data Server ' + ds_dir)
        sys.exit(1)

    printd('start_script_fqfn ' + start_script_fqfn)
    if not os.path.isfile(start_script_fqfn):
        print('Failure: Did not find a start script for Data Server ' + ds_dir)
        sys.exit(1)

    printd('Exit {0}'.format(inspect.stack()[0][3]))
    return start_script_fqfn


#################################################
# Rename a file or directory
#################################################
def rename_file_dir(src_item, dst_item, file_or_dir):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    print('')
    print('Rename ' + src_item + ' to ' + dst_item)
    if not os.path.exists(src_item):
        print('Failure: Source ' + file_or_dir + ' does not exist. ' + src_item)
        raw_input('Press enter to continue, or ctrl-c to exit')
    if os.path.exists(dst_item):
        print('Failure: Backup ' + file_or_dir + ' already exists. ' + dst_item)
        raw_input('Press enter to continue, or ctrl-c to exit')

    if os.path.exists(src_item) and not os.path.exists(dst_item):
        os.rename(src_item, dst_item)
        if os.path.exists(src_item):
            print('Failure: Source ' + file_or_dir + ' still exists. ' + src_item)
            raw_input('Press enter to continue, or ctrl-c to exit')
        if not os.path.exists(dst_item):
            print('Failure: Backup ' + file_or_dir + ' does not exist. ' + dst_item)
            raw_input('Press enter to continue, or ctrl-c to exit')
    else:
        print('Failure: Either the Source does not exist or the Backup already exists. Exiting...')
        sys.exit(1)

    printd('Exit {0}'.format(inspect.stack()[0][3]))
#################################################
# Copy a file
#################################################
def copy_file(src_file, dst_dir):
    printd('Entry {0}'.format(inspect.stack()[0][3]))

    if os.path.isfile(src_file):
        if os.path.isdir(dst_dir):
            print('')
            print('Copy file ' + src_file + ' to ' + dst_dir)
            shutil.copy2(src_file, dst_dir)
            src_file_core = os.path.basename(src_file)
            dst_file = dst_dir + os.sep + src_file_core
            if not os.path.isfile(dst_file):
                print('Failure: File ' + dst_file + ' does not exist after the copy')
                raw_input('Press enter to continue, or ctrl-c to exit')
        else:
            print('Failure: Directory ' + dst_dir + ' does not exist, while trying to copy this file into it: ' + src_file)
            raw_input('Press enter to continue, or ctrl-c to exit')
    else:
        print('Failure: File ' + src_file + ' does not exist, while trying to copy it into directory ' + dst_dir)
        raw_input('Press enter to continue, or ctrl-c to exit')

    printd('Exit {0}'.format(inspect.stack()[0][3]))


#################################################
# Main
#################################################
if __name__ == '__main__':

    debug_logging = False
    printd('Entry {0}'.format(inspect.stack()[0][1]))

    configuration_dict = {}

    # Parse the args
    parse_args(configuration_dict)

    # Set up a logger
    setup_logger(configuration_dict)

    current_timestamp = time.strftime('%Y-%m-%d-%H.%M.%S', time.localtime())
    print('Entry {0} {1}'.format(inspect.stack()[0][1], current_timestamp))

    # Read in a CSV file that contains info about the SAS Data Servers to upgrade
    read_csv_file(configuration_dict['my_args'].csv_file, configuration_dict)

    # Validate the CSV info
    validate_csv_info(configuration_dict)

    # If analyze mode
    if configuration_dict['my_args'].analyze_mode is True:
        analyze_data_servers(configuration_dict)
        print('')
        print('Note: This script is to upgrade only Postgres version 9.1')
    # Else if validate mode
    elif configuration_dict['my_args'].validate_mode is True:
        validate_data_servers(configuration_dict)
    # Else if upgrade mode
    elif configuration_dict['my_args'].upgrade_mode is True:
        upgrade_data_servers(configuration_dict)
    else:
        print('Failure: No mode of operation has been specified. Exiting without doing anything.')

    #################################################
    # Exit
    #################################################
    print('')
    current_timestamp = time.strftime('%Y-%m-%d-%H.%M.%S', time.localtime())
    print('Exit {0} {1}'.format(inspect.stack()[0][1], current_timestamp))
    sys.stdout.close()