#!/usr/bin/env python3
# -*- encoding: UTF8 -*-

# test_WinSDP.py
# Tests SDP (Server Deployment Package) on Windows VMs
# Intended to be run from within a Windows box on which it can freely do things like:
# - remove c:\p4 and sub-directories
# - stop/uninstall services such as p4_1 or p4_master
# So IT IS IMPORTANT you do not run these tests on a production machine!
# See documentation and run_tests.sh in /sdp/main/test/README.md

from __future__ import print_function

import os, sys, re, socket, time, shutil, logging, stat
import tempfile
import P4
import platform
import unittest
import fileinput, subprocess
import argparse
import glob

LOGGER_NAME = 'SDPWinTest'
MAILTO = 'mailto-admin@example.com'
MAILFROM = 'mailfrom-admin@example.com'

logger = logging.getLogger(LOGGER_NAME)

single_instance = None

class NotAdmin(Exception):
    pass

def get_host_ipaddress():
    try:
        address = socket.gethostbyname(socket.gethostname())
        # On my system, this always gives me 127.0.0.1. Hence...
    except:
        address = ''
    if not address or address.startswith('127.'):
        # ...the hard way.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(('4.2.2.1', 0))
        address = s.getsockname()[0]
        s.detach()
    logger.debug('IPAddress: %s' % address)
    return address

def init_logging():
    global logger
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s:%(name)s:%(levelname)s: %(message)s')
    logfile = '%s/%s.log' % (tempfile.gettempdir(), LOGGER_NAME)
    fh = logging.FileHandler(logfile, mode='w')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    print("Logging to %s" % logfile)

class OSUtils(object):
    """OS specific functions"""

    def __init__(self):
        self.windows = platform.platform().startswith("Windows")
        self.unix = not self.windows
        self._instance_dir = None
        self._bin_dir = None
        self._root_dir = None

    def check_initialised(self):
        if not self._instance_dir:
            raise Exception("Instance dir not set")

    def instance_dir(self):
        self.check_initialised()
        return self._instance_dir

    def bin_dir(self):
        self.check_initialised()
        return self._bin_dir

    def root_dir(self):
        self.check_initialised()
        return self._root_dir

    def set_instance(self, instance):
        if self.windows:
            self._instance_dir = "c:/p4/%s" % instance
            self._bin_dir = "c:/p4/common/bin"
            self._root_dir = "c:/p4"
        else:
            self._instance_dir = "/p4/%s" % instance
            self._bin_dir = "/p4/common/bin"
            self._root_dir = "/p4"

utils = OSUtils()
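# Example: after utils.set_instance("ftm1") (the instance used by the tests below),
# the accessors above return the following on Windows:
#   utils.instance_dir() == "c:/p4/ftm1"
#   utils.bin_dir()      == "c:/p4/common/bin"
#   utils.root_dir()     == "c:/p4"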
def do_unlink(filename):
    "Unlink file if it exists"
    if os.path.lexists(filename):
        os.unlink(filename)

def substitute_p4vars(line, instance, port):
    line = line.rstrip()
    if line.startswith('export MAILTO='):
        print("export MAILTO=%s" % MAILTO)
    elif line.startswith('export SSL_PREFIX=ssl:'):
        print("export SSL_PREFIX=")
    elif line.startswith('export MAILFROM='):
        print("export MAILFROM=%s" % MAILFROM)
    elif line.startswith('export P4PORTNUM='):
        print("export P4PORTNUM=%s" % port)
    elif line.startswith('export KEEPLOGS='):
        print("export KEEPLOGS=3")
    elif line.startswith('export KEEPCKPS='):
        print("export KEEPCKPS=3")
    elif line.startswith('export KEEPJNLS='):
        print("export KEEPJNLS=3")
    else:
        print(line)

class SDPTest_base(unittest.TestCase):
    "Generic test class for others to inherit"

    def setup_everything(self):
        if not utils.windows:
            return
        try:
            result = subprocess.check_call("net session > NUL 2>&1", shell=True, timeout=20)
        except Exception as e:
            raise NotAdmin("This test harness must be run with administrator level privileges or it will not work")

    def setUp(self):
        self.setup_everything()

    def run_test(self):
        pass

    def run_cmd(self, cmd, get_output=True, timeout=35, stop_on_error=True):
        "Run cmd logging input and output"
        output = ""
        try:
            logger.debug("Running: %s" % cmd)
            if re.search(r"\.ps1", cmd, re.IGNORECASE):
                cmd = 'powershell.exe -ExecutionPolicy Bypass -Command "& %s"' % cmd
                logger.debug("Updated to be: %s" % cmd)
            if get_output:
                try:
                    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE, bufsize=0, universal_newlines=True)
                    outs, errs = proc.communicate(timeout=timeout)
                except subprocess.TimeoutExpired:
                    proc.kill()
                    outs, errs = proc.communicate()
                output = outs + errs
                logger.debug("Output:\n%s\nError: %s" % (outs, errs))
            else:
                result = 0
                try:
                    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE, bufsize=0, universal_newlines=True)
                    outs, errs = proc.communicate(timeout=timeout)
                    result = proc.returncode
                except subprocess.TimeoutExpired:
                    proc.kill()
                    outs, errs = proc.communicate()
                    output = outs + errs
                    result = proc.returncode
                logger.debug('Result: %d, %s\n%s' % (result, outs, errs))
        except Exception as e:
            logger.warning("Exception: %s" % str(e))
            if get_output:
                logger.debug("Failed Output: %s" % output)
            if stop_on_error:
                msg = 'Failed run_cmd: %s' % str(e)
                logger.debug(msg)
                self.fail(msg)
        return output

    def admin_cmd(self, cmd, get_output=True, stop_on_error=True):
        "Run cmd with sudo - no different on Windows"
        if utils.unix:
            cmd = "sudo %s" % cmd
        return self.run_cmd(cmd, get_output=get_output, stop_on_error=stop_on_error)
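    # Note on run_cmd above: a PowerShell script reference such as
    #   c:/p4/common/bin/daily-backup.ps1 ftm1
    # is rewritten before execution to
    #   powershell.exe -ExecutionPolicy Bypass -Command "& c:/p4/common/bin/daily-backup.ps1 ftm1"
    # so the individual tests can pass .ps1 paths straight to run_cmd.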
    def configure_sdp_setup(self, instance):
        "Configure sdp_master_config.ini with a couple of key variables"
        return  # TODO
        ipaddr = get_host_ipaddress()
        for line in fileinput.input(mkdirs_file, inplace=True):
            line = line.rstrip()
            if line.startswith('P4MASTERHOST'):
                print("P4MASTERHOST=%s" % ipaddr)
            elif line.startswith('P4ADMINPASS'):
                print("P4ADMINPASS=Password1")
            elif line.startswith('MASTERINSTANCE'):
                print("MASTERINSTANCE=%s" % instance)
            elif line.startswith('HOST_IP'):
                print("HOST_IP=%s" % ipaddr)
            else:
                print(line)

    def run_mkdirs(self, instance):
        "Runs the relevant script - valid for Unix only"
        if utils.windows:
            return
        output = self.admin_cmd("%s %s" % (mkdirs_file, instance))
        valid_lines = ["Verified: Running as root.",
                       "Setting permissions on depot files - this may take some time ...",
                       "Setting ownership on depot files - this may take some time ...",
                       "It is recommended that the perforce's umask be changed to 0026 to block world access to Perforce files.",
                       "Add umask 0026 to perforce's .bash_profile to make this change."]
        for line in output.split('\n'):
            line = line.strip()
            if line and line not in valid_lines:
                self.fail('Unexpected line in mkdirs output:\n%s' % line)

    def p4service(self, action, instance, stop_on_error=True):
        "Start or stop service"
        if utils.windows:
            cmd = "net %s p4_%s" % (action, instance)
        else:
            cmd = "%s/p4d_%s_init %s" % (utils.instance_dir(), instance, action)
        self.run_cmd(cmd, get_output=False, stop_on_error=stop_on_error)

    def check_dirs_exist(self, rootdir, dirlist):
        "Checks specified directories are present"
        found_dirs = []
        for path, dirs, files in os.walk(rootdir):
            found_dirs.append(path)
        for d in [x.strip() for x in dirlist.split()]:
            self.assertIn(d, found_dirs)

    def remove_test_dirs(self, instances):
        "Remove all appropriate directories created"
        dirs_to_remove = "/depotdata/sdp /depotdata/p4 /metadata/p4 /logs/p4".split()
        for d in dirs_to_remove:
            if os.path.exists(d):
                self.admin_cmd("rm -rf %s" % d)
        for instance in instances:
            for f in ["/p4/%s" % instance, "/p4/common"]:
                if os.path.lexists(f):
                    self.admin_cmd("unlink %s" % f)

    def readLog(self, log_name, instance):
        "Read the appropriate log file contents"
        with open('%s/logs/%s' % (utils.instance_dir(), log_name), 'r') as fh:
            log_contents = fh.read()
        if utils.windows:
            # Easier to change slashes than deal with double quoting for regex matches etc
            return re.sub(r'\\', '/', log_contents)
        return log_contents

    def p4run(self, *args):
        "Run the command logging"
        logger.debug('p4 cmd: %s' % ",".join([str(x) for x in args]))
        result = self.p4.run(args)
        logger.debug('result: %s' % str(result))
        return result
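    # Note: p4run relies on P4Python's default tagged output, so a command such as
    #   self.p4run('counter', 'journal')
    # returns a list of dicts, and the tests below read the counter via
    #   self.p4run('counter', 'journal')[0]['value']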
#--- Test Cases

class configure_master(SDPTest_base):

    def liveCheckpointTest(self, instance):
        "Test live checkpoint script"
        self.assertFalse(os.path.exists('%s/offline_db/db.domain' % utils.instance_dir()))
        self.run_cmd(r'c:\p4\common\bin\live-checkpoint.ps1 -SDPInstance %s' % instance)
        # Quick check on log file contents
        log_contents = self.readLog('checkpoint.log', instance)
        self.assertRegex(log_contents, r"Checkpointing to %s/checkpoints/p4_%s.ckp" % (utils.instance_dir(), instance))
        self.assertRegex(log_contents, "journal")
        # Make sure offline db is present
        self.assertTrue(os.path.exists('%s/offline_db/db.domain' % utils.instance_dir()))

    def recreateOfflineDBTest(self, instance):
        "Test recreate_offline_db script"
        self.assertTrue(os.path.exists('%s/offline_db/db.domain' % utils.instance_dir()))
        self.admin_cmd("del /f/q %s/offline_db/db.*" % utils.instance_dir())
        self.run_cmd('%s/create-offline-db-from-checkpoint.ps1 %s' % (utils.bin_dir(), instance))
        # Quick check on log file contents
        logPattern = '%s/logs/recreate-offline-db-from-checkpoint.log*' % utils.instance_dir()
        logfiles = glob.glob(logPattern)
        self.assertEqual(1, len(logfiles))
        log_contents = self.readLog(os.path.basename(logfiles[0]), instance)
        self.assertRegex(log_contents, "Starting Recreate")
        regex = re.compile(r"Recovering from %s/checkpoints/p4_%s.ckp" % (
            utils.instance_dir(), instance), re.IGNORECASE)
        self.assertRegex(log_contents, regex)
        # Make sure offline db is present
        self.assertTrue(os.path.exists('%s/offline_db/db.domain' % utils.instance_dir()))

    def failedDailyBackupTest(self, instance):
        "Test daily backup script - expected to fail due to lack of offline db"
        jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.run_cmd(r'%s/daily-backup.ps1 -SDPInstance %s' % (utils.bin_dir(), instance))
        # Quick check on log file contents
        log_contents = self.readLog('checkpoint.log', instance)
        self.assertRegex(log_contents, "Offline database not in a usable state")
        new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.assertEqual(int(new_jnl_counter), int(jnl_counter))

    def dailyBackupTest(self, instance):
        "Test daily backup script"
        jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.run_cmd('%s/daily-backup.ps1 %s' % (utils.bin_dir(), instance))
        # Quick check on log file contents
        log_contents = self.readLog('checkpoint.log', instance)
        self.assertRegex(log_contents, r"Dumping to %s/checkpoints/p4_%s.ckp" % (utils.instance_dir(), instance))
        self.assertRegex(log_contents, "journal")
        new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.assertEqual(int(new_jnl_counter), int(jnl_counter) + 1)

    def verifyTest(self, instance):
        "Test verify script"
        verify_cmd = '%s/p4verify.bat %s' % (utils.bin_dir(), instance)
        self.run_cmd(verify_cmd)
        log_contents = self.readLog('p4verify.log', instance)
        for depot in ["depot", "specs"]:
            verify_ok = re.compile("verify -qz //%s/...\nexit: 0" % depot, re.MULTILINE)
            self.assertRegex(log_contents, verify_ok)
        # Streams depot doesn't have any files so gives an error - we just search for it
        #TODO - verify other types of depots
        #self.assertRegex(log_contents, re.compile("verify -qz //streams/...\n[^\n]*\nexit: 0", re.MULTILINE))
        #self.assertRegex(log_contents, re.compile("verify -U -q //unload/...\n[^\n]*\nexit: 0", re.MULTILINE))
        self.assertNotRegex(log_contents, "//archive")
        # Now create verify errors and make sure we see them
        orig_depot_name = 'c:/p4/%s/depots/depot' % instance
        new_depot_name = orig_depot_name + '.new'
        os.rename(orig_depot_name, new_depot_name)
        self.run_cmd(verify_cmd, stop_on_error=False)
        log_contents = self.readLog('p4verify.log', instance)
        for depot in ["depot"]:
            verify_ok = re.compile("verify -qz //%s/...\nerror: [^\n]*MISSING!\nexit: 1" % depot, re.MULTILINE)
            self.assertRegex(log_contents, verify_ok)
        # Rename things back again and all should be well!
        os.rename(new_depot_name, orig_depot_name)
        self.run_cmd(verify_cmd, stop_on_error=True)
        log_contents = self.readLog('p4verify.log', instance)
        for depot in ["depot", "specs"]:
            verify_ok = re.compile("verify -qz //%s/...\nexit: 0" % depot, re.MULTILINE)
            self.assertRegex(log_contents, verify_ok)

    def failedWeeklyBackupTest(self, instance):
        "Test weekly backup script - expected to fail due to lack of offline db"
        jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.run_cmd('%s/recreate_db_checkpoint.ps1 %s' % (utils.bin_dir(), instance), stop_on_error=False)
        # Quick check on log file contents
        log_contents = self.readLog('checkpoint.log', instance)
        self.assertRegex(log_contents, "Offline database not in a usable state")
        self.p4.disconnect()    # Need to reconnect as weekly has restarted p4d
        self.p4.connect()
        new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.assertEqual(int(new_jnl_counter), int(jnl_counter))

    def weeklyBackupTest(self, instance):
        "Test weekly backup script"
        jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.run_cmd('/p4/common/bin/recreate-live-from-offline-db.ps1 %s' % instance)
        # Quick check on log file contents
        log_contents = self.readLog('checkpoint.log', instance)
        self.assertRegex(log_contents, "Rotating %s/logs/journal to %s/checkpoints/p4_ftm1.jnl" % (
            utils.instance_dir(), utils.instance_dir()))
        self.assertRegex(log_contents, "journal")
        self.p4.disconnect()    # Need to reconnect as weekly has restarted p4d
        self.p4.connect()
        new_jnl_counter = self.p4run('counter', 'journal')[0]['value']
        self.assertEqual(int(new_jnl_counter), int(jnl_counter) + 1)

    def configureServer(self, instance):
        "Set various configurables for master"
        configurables = """
            security=3
            run.users.authorize=1
            db.peeking=2
            dm.user.noautocreate=2
            dm.user.resetpassword=1
            filesys.P4ROOT.min=1G
            filesys.depot.min=1G
            filesys.P4JOURNAL.min=1G
            p4 configure unset monitor
            server=3
            net.tcpsize=256k
            lbr.bufsize=256k
            server.commandlimits=2
            serverlog.retain.3=7
            serverlog.retain.7=7
            serverlog.retain.8=7""".split("\n")
        instance_configurables = """
            journalPrefix=c:/p4/${SDP_INSTANCE}/checkpoints/p4_${SDP_INSTANCE}
            server.depot.root=c:/p4/${SDP_INSTANCE}/depots""".split("\n")
        for c in [x.strip() for x in configurables]:
            self.p4run("configure set %s" % c)
        for ic in instance_configurables:
            ic = ic.strip()
            ic = ic.replace("${SDP_INSTANCE}", instance)
            self.p4run("configure set %s" % ic)
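    # Note: the ${SDP_INSTANCE} placeholders above are expanded per instance, e.g. for
    # instance "ftm1" the first instance configurable becomes:
    #   journalPrefix=c:/p4/ftm1/checkpoints/p4_ftm1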
    def configureReplication(self):
        "Configures stuff required for replication"

    def resetTest(self, instances):
        for instance in instances:
            try:
                self.p4service("stop", instance)
            except:
                pass
            self.run_cmd(r"del /f/q c:\p4\%s\root\*.*" % instance, stop_on_error=False)
            self.run_cmd(r"del /f/q c:\p4\%s\offline_db\*.*" % instance, stop_on_error=False)
            self.run_cmd(r"del /f/q c:\p4\%s\logs\*.*" % instance, stop_on_error=False)
            self.run_cmd(r"del /f/q c:\p4\%s\checkpoints\*.*" % instance, stop_on_error=False)
        #self.admin_cmd("cp -R /sdp /depotdata/sdp")
        #self.admin_cmd("sudo chown -R perforce:perforce /depotdata/sdp")
        #for f in ["/p4/p4.crontab", "/p4/p4.crontab.replica", "/p4/p4.crontab.edge"]:
        #    if os.path.exists(f):
        #        os.remove(f)

    def setOfflineDBState(self, instance, usable=True):
        logger.debug("Setting offline_db state to %s", str(usable))
        fname = r"%s/offline_db/offline_db_usable.txt" % utils.instance_dir()
        if usable:
            with open(fname, 'w') as fh:
                fh.write('Offline DB is usable\n')
        else:
            os.remove(fname)
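    # Note: offline_db_usable.txt appears to act as a flag file - the SDP backup scripts
    # will not use the offline_db while it is absent, which is what the failed*BackupTest
    # cases above rely on ("Offline database not in a usable state").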
    def installInstance(self, instance, port):
        "Install the specified instance"
        # Stop the Perforce service if currently running from a previous run in case it is accessing dirs
        self.configure_sdp_setup(instance)
        return  # TODO
        self.run_mkdirs(instance)
        depotdata_dir_list = """
            /depotdata/p4
            /depotdata/p4/SDP_INSTANCE
            /depotdata/p4/SDP_INSTANCE/depots
            /depotdata/p4/SDP_INSTANCE/bin
            /depotdata/p4/SDP_INSTANCE/checkpoints
            /depotdata/p4/common
            /depotdata/p4/common/bin
            /depotdata/p4/common/bin/triggers
            /depotdata/p4/common/lib""".replace("SDP_INSTANCE", instance)
        logdata_dir_list = """
            /logs
            /logs/p4
            /logs/p4/SDP_INSTANCE
            /logs/p4/SDP_INSTANCE/tmp
            /logs/p4/SDP_INSTANCE/logs""".replace("SDP_INSTANCE", instance)
        metadata_dir_list = """
            /metadata
            /metadata/p4
            /metadata/p4/SDP_INSTANCE
            /metadata/p4/SDP_INSTANCE/root
            /metadata/p4/SDP_INSTANCE/root/save
            /metadata/p4/SDP_INSTANCE/offline_db""".replace("SDP_INSTANCE", instance)
        self.check_dirs_exist('/depotdata', depotdata_dir_list)
        self.check_dirs_exist('/logs', logdata_dir_list)
        self.check_dirs_exist('/metadata', metadata_dir_list)
        configure_instance_vars(instance, port)
        configure_p4_vars(instance, port)

    def configure_p4d_instance(self, p4, instance):
        # Create our user and set password
        logger.debug('Creating user and setting password')
        user = p4.fetch_user('perforce')
        p4.save_user(user)
        p4.run_password('', 'Password1')
        p4.password = 'Password1'
        p4.run_login()

        # Make the user a superuser
        prot = p4.fetch_protect()
        p4.save_protect(prot)

        # Things to setup
        # - create spec depot
        # - create a workspace and add at least one file
        # - configure the various tunables
        # - create server definitions - master and replica
        # - create service user for replica

        logger.debug('Set configurables')
        p4.run('configure', 'set', '%s#server.depot.root=c:/p4/%s/depots' % (instance, instance))
        p4.run('admin', 'restart')

        p4.disconnect()     # New depot won't show up unless we do this
        time.sleep(1)
        p4.connect()

        logger.debug('Create depots')
        depot = p4.fetch_depot('specs')
        self.assertEqual(depot['Map'], 'specs/...')
        depot['Type'] = 'spec'
        p4.save_depot(depot)

        depot = p4.fetch_depot('unload')
        self.assertEqual(depot['Map'], 'unload/...')
        depot['Type'] = 'unload'
        p4.save_depot(depot)

        depot = p4.fetch_depot('archive')
        self.assertEqual(depot['Map'], 'archive/...')
        depot['Type'] = 'archive'
        p4.save_depot(depot)

        depot = p4.fetch_depot('streams')
        self.assertEqual(depot['Map'], 'streams/...')
        depot['Type'] = 'stream'
        p4.save_depot(depot)

        p4.disconnect()     # New depot won't show up unless we do this
        p4.connect()
        depots = p4.run_depots()
        self.assertEqual(5, len(depots))

        logger.debug('Create workspace and add file')
        ws_name = 'test_ws'
        ws = p4.fetch_client(ws_name)
        ws['Root'] = '%s/test_ws' % (tempfile.gettempdir())
        ws['View'] = ['//depot/main/... //%s/...' % ws_name]
        p4.save_client(ws)
        p4.client = ws_name

        if not os.path.exists(ws['Root']):
            os.mkdir(ws['Root'])
        fname = '%s/%s/file1' % (tempfile.gettempdir(), ws_name)
        if os.path.exists(fname):
            os.chmod(fname, stat.S_IWRITE)
            os.unlink(fname)
        with open(fname, 'w') as fh:
            fh.write('test data\n')
        p4.run_add(fname)
        chg = p4.fetch_change()
        chg['Description'] = 'Initial file'
        p4.save_submit(chg)
        changes = p4.run_changes()
        self.assertEqual(1, len(changes))

    def assertLogCount(self, expected, logPattern):
        logger.debug("Looking for logs: %s" % logPattern)
        logs = glob.glob(logPattern)
        self.assertEqual(expected, len(logs))

    def instanceTest(self, instance, port):
        # So now we want to start up the Perforce service
        self.p4service("start", instance)

        p4 = P4.P4()
        self.p4 = p4
        p4.port = 'localhost:%s' % port
        p4.user = 'perforce'
        p4.connect()
        self.configure_p4d_instance(p4, instance)

        # - run daily_checkpoint - check for error
        # - run live_checkpoint - make sure offline_db seeded
        # - run daily checkpoint more than once - create change lists in between times
        # - run weekly_checkpoint - check result

        ckpLogPattern = '%s/logs/checkpoint.log*' % utils.instance_dir()
        logPattern = '%s/logs/%s.log*' % (utils.instance_dir(), instance)

        # Following 2 tests should fail due to lack of offline_db
        self.failedDailyBackupTest(instance)
        self.failedWeeklyBackupTest(instance)

        self.setOfflineDBState(instance, True)
        self.liveCheckpointTest(instance)

        # Now the offline_db should exist
        # Run enough times to ensure logs get removed - (KEEPCKPS + 1)
        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)

        # Manually rotate journals again and ensure daily backup handles that
        self.p4run('admin', 'journal', '/p4/%s/checkpoints/p4_%s' % (instance, instance))
        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)

        self.verifyTest(instance)

        # Tests:
        # - totalusers.py
        # - the various _init scripts
        #
        print('\n\nAbout to run weekly backup which sleeps for 30 seconds, so be patient...!')
        self.weeklyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        self.assertLogCount(3, logPattern)
        time.sleep(1)

        self.dailyBackupTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        self.assertLogCount(3, logPattern)

        # Delete offline_db and check we can recreate
        self.recreateOfflineDBTest(instance)
        self.assertLogCount(3, ckpLogPattern)
        self.assertLogCount(3, logPattern)
        time.sleep(1)

        self.dailyBackupTest(instance)
        # Note Daily doesn't increase the journal number so there are 2 with latest
        self.assertLogCount(3, ckpLogPattern)
        self.assertLogCount(3, logPattern)

        # print(p4.run_admin('stop'))

    def runTest(self):
        "Configure the master instance"
        all_instances = {"ftm1": "2111"}
        if single_instance and single_instance in all_instances:
            instances = {single_instance: all_instances[single_instance]}
        else:
            instances = all_instances
        for instance, port in instances.items():
            utils.set_instance(instance)
            self.resetTest(all_instances.keys())
            self.installInstance(instance, port)
            self.instanceTest(instance, port)

if __name__ == "__main__":
    init_logging()
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--instance', default=None)
    options, args = parser.parse_known_args()
    testrunner = None
    if options.instance:
        single_instance = options.instance
    unittest.main(testRunner=testrunner, argv=sys.argv[:1] + args)
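# Example invocation (assuming Python 3 with the P4Python module installed, run from an
# elevated command prompt on a disposable Windows VM - see the warning at the top of this file):
#   python test_WinSDP.py                   # run against all configured instances
#   python test_WinSDP.py --instance ftm1   # run against a single instance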