#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2014 Sven Erik Knop/Robert Cowham, Perforce Software Ltd
#
# ========================================
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the
#    distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL PERFORCE
# SOFTWARE, INC. BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# User contributed content on the Perforce Public Depot is not supported by Perforce,
# although it may be supported by its author. This applies to all contributions
# even those submitted by Perforce employees.
# ========================================
#
# P4Transfer.py
#
# This python script will transfer Perforce changelists with all contents
# between independent servers when no remote depots are possible.
#
# This script transfers changes in one direction - from a source server to a target server.
# There is an alternative called PerforceExchange.py which transfers changes
# in both directions.
#
# Usage:
#   python2 P4Transfer.py [options]
#
# The -h/--help option describes all options.
#
# The script requires a config file, normally called transfer.cfg,
# that provides the Perforce connection information for both servers.
# The config file has three sections: [general], [source] and [target].
# See DEFAULT_CONFIG for details. An initial example can be generated, e.g.
#
#   P4Transfer.py --sample-config > transfer.cfg
#
# The script also needs a directory in which it can place the mapped files.
# This directory has to be the root of both servers' workspaces (this will be verified).
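#
# For orientation, a minimal illustrative transfer.cfg is sketched below. The server
# addresses, user and client names are placeholders only, not recommendations; the
# authoritative template (with all [general] options) comes from --sample-config and
# the DEFAULT_CONFIG dictionary further down in this file.
#
#   [general]
#   counter_name = p4transfer_counter
#   instance_name = Perforce Transfer from XYZ
#
#   [source]
#   p4port = source-server:1666
#   p4user = transfer_user
#   p4client = p4-transfer-client-src
#   p4passwd =
#
#   [target]
#   p4port = target-server:1666
#   p4user = transfer_user
#   p4client = p4-transfer-client-targ
#   p4passwd =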
from __future__ import print_function VERSION = """$Id: //guest/perforce_software/p4transfer/P4Transfer.py#72 $""" import sys, re, hashlib, stat from collections import OrderedDict import P4 import pprint from string import Template def logrepr(self): return pprint.pformat(self.__dict__, width=240) # Log messages just once per run alreadyLogged = {} def logOnce(logger, *args): global alreadyLogged msg = ", ".join([str(x) for x in args]) if not msg in alreadyLogged: alreadyLogged[msg] = 1 logger.debug(msg) P4.Revision.__repr__ = logrepr P4.Integration.__repr__ = logrepr P4.DepotFile.__repr__ = logrepr python3 = sys.version_info[0] >= 3 if sys.hexversion < 0x02070000 or (0x0300000 <= sys.hexversion < 0x0303000): sys.exit("Python 2.7 or 3.3 or newer is required to run this program.") # Although this should work with Python 3, it doesn't currently handle Windows Perforce servers # with filenames containing charaters such as umlauts etc: åäö if python3: from configparser import ConfigParser else: from ConfigParser import ConfigParser import argparse import os.path from datetime import datetime import logging import time import logutils class P4TException(Exception): pass class P4TLogicException(Exception): pass class P4TConfigException(P4TException): pass CONFIG = 'transfer.cfg' GENERAL_SECTION = 'general' SOURCE_SECTION = 'source' TARGET_SECTION = 'target' LOGGER_NAME = "P4Transfer" # This is for writing to sample config file - OrderedDict used to preserve order of lines. DEFAULT_CONFIG = OrderedDict({ GENERAL_SECTION: OrderedDict([ ("# counter_name: Unique counter on target server to use for recording source changes processed. No spaces.", None), ("# Name sensibly if you have multiple instances transferring into the same target p4 repository.", None), ("# The counter value represents the last transferred change number - script will start from next change.", None), ("# If not set, or 0 then transfer will start from first change.", None), ("counter_name", "p4transfer_counter"), ("# instance_name: Name of the instance of P4Transfer - for emails etc. Spaces allowed.", None), ("instance_name", "Perforce Transfer from XYZ"), ("# For notification - testing if smtp not available - expects a pre-configured nms FormMail instance", None), ("mail_form_url", ""), ("# The mail_* parameters must all be valid (non-blank) to receive email updates during processing. ", None), ("# mail_to: One or more valid email addresses - comma separated for multiple values", None), ("# E.g. somebody@example.com,somebody-else@example.com", None), ("mail_to", ""), ("# mail_from: Email address of sender of emails, E.g. p4transfer@example.com", None), ("mail_from", ""), ("# mail_server: The SMTP server to connect to for email sending, E.g. smtpserver.example.com", None), ("mail_server", ""), ("# sleep_on_error_interval: How long (in minutes) to sleep when error is encountered in the script", None), ("sleep_on_error_interval", "60"), ("# poll_interval: How long (in minutes) to wait between polling source server for new changes", None), ("poll_interval", "60"), ("# change_batch_size: changelists are processed in batches of this size", None), ("change_batch_size", "20000"), ("# The following *_interval values result in reports, but only if mail_* values are specified", None), ("# report_interval: Interval (in minutes) between regular update emails being sent", None), ("report_interval", "30"), ("# error_report_interval: Interval (in minutes) between error emails being sent e.g. 
connection error", None), ("# Usually some value less than report_interval. Useful if transfer being run with --repeat option.", None), ("error_report_interval", "15"), ("# summary_report_interval: Interval (in minutes) between summary emails being sent e.g. changes processed", None), ("# Typically some value such as 1 week (10080 = 7 * 24 *60). Useful if transfer being run with --repeat option.", None), ("summary_report_interval", "10080"), ("# sync_progress_size_interval: Size in bytes controlling when syncs are reported to log file. ", None), ("# Useful for keeping an eye on progress for large syncs over slow links. ", None), ("sync_progress_size_interval", "500000000"), ("# change_description_format: The standard format for transferred changes. ", None), ("# Keywords prefixed with $. Use \\n for newlines. Keywords allowed: ", None), ("# $sourceDescription, $sourceChange, $sourcePort, $sourceUser ", None), ("change_description_format", "$sourceDescription\\n\\nTransferred from p4://$sourcePort@$sourceChange"), ("# change_map_file: An (optional) CSV file listing mappings of source/target changelists. ", None), ("# If this is blank (DEFAULT) then no mapping file is created.", None), ("# If non-blank, then the name of the file in the target workspace is appended to", None), ("# and will be submitted after every sequence of changes is made.", None), ("# Default type of this file is text+CS32 to avoid storing too many revisions.", None), ("change_map_file", ""), ]), SOURCE_SECTION: OrderedDict([ ("# P4PORT to connect to, e.g. some-server:1666", None), ("p4port", ""), ("# P4USER to use", None), ("p4user", ""), ("# P4CLIENT to use, e.g. p4-transfer-client", None), ("p4client", ""), ("# P4PASSWD for the user - valid password. If blank then no login performed.", None), ("p4passwd", "")]), TARGET_SECTION: OrderedDict([ ("# P4PORT to connect to, e.g. some-server:1666", None), ("p4port", ""), ("# P4USER to use", None), ("p4user", ""), ("# P4CLIENT to use, e.g. p4-transfer-client", None), ("p4client", ""), ("# P4PASSWD for the user - valid password. If blank then no login performed.", None), ("p4passwd", "")]), }) class SourceTargetTextComparison(object): """Decide if source and target servers are similar OS so that text files can be compared by size and digest (no line ending differences)""" sourceVersion = None targetVersion = None def _getOS(self, server): info = server.p4cmd("info", "-s")[0] serverVersion = info["serverVersion"] parts = serverVersion.split("/") return parts[1] def setup(self, src, targ): self.sourceVersion = self._getOS(src) self.targetVersion = self._getOS(targ) def compatible(self): if self.sourceVersion: # TODO: compare different architectures better - e.g. 
allow 32 vs 64 bit return self.sourceVersion == self.targetVersion return False sourceTargetTextComparison = SourceTargetTextComparison() def isText(ftype): "If filetype is not text - binary or unicode" if re.search("text", ftype): return True return False def fileContentComparisonPossible(ftype): "Decides if it is possible to compare size/digest for text files" if not isText(ftype): return True if "k" in ftype: return False return sourceTargetTextComparison.compatible() def readContents(fname): "Reads file contents appropriate according to type" flags = "rb" with open(fname, flags) as fh: contents = fh.read() return contents def writeContents(fname, contents): flags = "wb" ensureDirectory(os.path.dirname(fname)) if os.path.exists(fname): os.chmod(fname, stat.S_IWRITE) with open(fname, flags) as fh: try: fh.write(contents) except TypeError as e: fh.write(contents.encode()) def ensureDirectory(directory): if not os.path.isdir(directory): os.makedirs(directory) def getDigest(fname): "Return MD5 digest of file on disk" m = hashlib.md5() contents = readContents(fname) m.update(contents) return m.digest() def fileContentChanged(file): if fileContentComparisonPossible(file.type): fileSize = os.path.getsize(file.fixedLocalFile) digest = getDigest(file.fixedLocalFile) return (fileSize, digest) != (file.fileSize, file.digest) return False def p4time(unixtime): "Convert time to Perforce format time" return time.strftime("%Y/%m/%d:%H:%M:%S", time.localtime(unixtime)) def printSampleConfig(): "Print defaults from above dictionary for saving as a base file" config = ConfigParser(allow_no_value=True) config.optionxform = str for sec in DEFAULT_CONFIG.keys(): config.add_section(sec) for k in DEFAULT_CONFIG[sec].keys(): config.set(sec, k, DEFAULT_CONFIG[sec][k]) print("") print("# Save this output to a file to e.g. 
transfer.cfg and edit it for your configuration") print("") config.write(sys.stdout) def fmtsize(num): for x in ['bytes', 'KB', 'MB', 'GB', 'TB']: if num < 1024.0: return "%3.1f %s" % (num, x) num /= 1024.0 class ChangeRevision: "Represents a change - created from P4API supplied information and thus encoding" def __init__(self, rev, change, n): self.rev = rev self.action = change['action'][n] self.type = change['type'][n] self.depotFile = change['depotFile'][n] self.localFile = None self.fileSize = 0 self.digest = "" self.fixedLocalFile = None self._integrations = [] if self.action not in ['delete', 'move/delete']: if 'fileSize' in change: self.fileSize = change['fileSize'][n] if 'digest' in change: self.digest = change['digest'][n] def addIntegrationInfo(self, integ): "Add what could be more than one integration" self._integrations.append(integ) def hasIntegrations(self): return len(self._integrations) def numIntegrations(self): return len(self._integrations) def integrations(self): "Yield in reverse order so that we replay correctly" for ind, integ in reversed(list(enumerate(self._integrations))): yield ind, integ def getIntegration(self, index=0): "Latest integration" return self._integrations[index] def localFileRev(self): "Fully specify local file with rev number" return "%s#%s" % (self.localFile, self.rev) def localIntegSource(self, index=0): "Fully specify local source with start/end of revisions" return "%s#%d,%d" % (self._integrations[index].localFile, self._integrations[index].srev, self._integrations[index].erev) def localIntegSyncSource(self, index=0): "Fully specify local source with end rev for syncing" return "%s#%d" % (self._integrations[index].localFile, self._integrations[index].erev) def integSyncSource(self, index=0): "Integration source with end rev" return "%s#%d" % (self._integrations[index].file, self._integrations[index].erev) def setLocalFile(self, localFile): self.localFile = localFile localFile = localFile.replace("%40", "@") localFile = localFile.replace("%23", "#") localFile = localFile.replace("%2A", "*") localFile = localFile.replace("%25", "%") localFile = localFile.replace("/", os.sep) self.fixedLocalFile = localFile def __repr__(self): return 'rev={rev} action={action} type={type} size={size} digest={digest} depotFile={depotfile}' .format( rev=self.rev, action=self.action, type=self.type, size=self.fileSize, digest=self.digest, depotfile=self.depotFile, ) def __hash__(self): return hash(self.localFile) def __eq__(self, other): "For comparisons between source and target after transfer" if self.localFile != other.localFile: # Check filename return False # Purge means filetype +Sn - so no comparison possible if self.action == 'purge' or other.action == 'purge': return True if fileContentComparisonPossible(self.type): if (self.fileSize, self.digest) != (other.fileSize, other.digest): if self.type == 'utf16': if abs(int(self.fileSize) - int(other.fileSize)) < 5: return True return False return True class ChangelistComparer(object): "Compare two lists of filerevisions" def listsEqual(self, srclist, targlist, filesToIgnore): "Compare two lists of changes, with an ignore list" srcfiles = set([chRev.localFile for chRev in srclist if chRev.localFile not in filesToIgnore]) targfiles = set([chRev.localFile for chRev in targlist]) diffs = srcfiles.difference(targfiles) if diffs: return (False, "Replication failure: missing elements in target changelist: %s" % ", ".join([str(r) for r in diffs])) srcfiles = set(chRev for chRev in srclist if chRev.localFile not in 
filesToIgnore) targfiles = set(chRev for chRev in targlist) diffs = srcfiles.difference(targfiles) if diffs: targlookup = {} for chRev in targlist: targlookup[chRev.localFile] = chRev return (False, "Replication failure: src/target content differences found\nsrc:%s\ntarg:%s" % ( "\n ".join([str(r) for r in diffs]), "\n ".join([str(targlookup[r.localFile]) for r in diffs]))) return (True, "") class ReportProgress(object): "Report overall progress" def __init__(self, p4, changes, logger, workspace): self.logger = logger self.filesToSync = 0 self.changesToSync = len(changes) self.sizeToSync = 0 self.filesSynced = 0 self.changesSynced = 0 self.sizeSynced = 0 self.previousSizeSynced = 0 self.sync_progress_size_interval = None # Set to integer value to get reports self.logger.info("Syncing %d changes" % (len(changes))) for chg in changes: sizes = p4.run('sizes', '-s', '//%s/...@%s,%s' % (workspace, chg['change'], chg['change'])) self.sizeToSync += int(sizes[0]['fileSize']) self.filesToSync += int(sizes[0]['fileCount']) self.logger.info("Syncing filerevs %d, size %s" % (self.filesToSync, fmtsize(self.sizeToSync))) def SetSyncProgressSizeInterval(self, interval): "Set appropriate" if interval: self.sync_progress_size_interval = int(interval) def ReportChangeSync(self): self.changesSynced += 1 def ReportFileSync(self, fileSize): self.filesSynced += 1 self.sizeSynced += fileSize if not self.sync_progress_size_interval: return if self.sizeSynced > self.previousSizeSynced + self.sync_progress_size_interval: self.previousSizeSynced = self.sizeSynced syncPercent = 100 * float(self.filesSynced) / float(self.filesToSync) sizePercent = 100 * float(self.sizeSynced) / float(self.sizeToSync) self.logger.info("Synced %d/%d changes, files %d/%d (%2.1f %%), size %s/%s (%2.1f %%)" % ( \ self.changesSynced, self.changesToSync, self.filesSynced, self.filesToSync, syncPercent, fmtsize(self.sizeSynced), fmtsize(self.sizeToSync), sizePercent)) class P4Base(object): "Processes a config" section = None P4PORT = None P4CLIENT = None P4USER = None P4PASSWD = None counter = 0 clientLogged = 0 def __init__(self, section, options, p4id): self.section = section self.options = options self.logger = logging.getLogger(LOGGER_NAME) self.p4id = p4id self.p4 = None self.client_logged = 0 def __str__(self): return '[section = {} P4PORT = {} P4CLIENT = {} P4USER = {} P4PASSWD = {}]'.format( \ self.section, self.P4PORT, self.P4CLIENT, self.P4USER, self.P4PASSWD, ) def connect(self, progname): self.p4 = P4.P4() self.p4.port = self.P4PORT self.p4.client = self.P4CLIENT self.p4.user = self.P4USER self.p4.prog = progname self.p4.exception_level = P4.P4.RAISE_ERROR self.p4.connect() if not self.P4PASSWD == None: self.p4.password = self.P4PASSWD self.p4.run_login() clientspec = self.p4.fetch_client(self.p4.client) logOnce(self.logger, "%s:%s:%s" % (self.p4id, self.p4.client, pprint.pformat(clientspec))) self.root = clientspec._root self.clientspec = clientspec self.p4.cwd = self.root self.clientmap = P4.Map(clientspec._view) # try: # protects = self.p4cmd("protects", "//%s/..." % clientspec._client) # except P4.P4Exception as e: # # This is unlikely except for during testing so ignore it # if e.errors[0] != 'Protections table is empty.': # raise(e) ctr = P4.Map('//"'+clientspec._client+'/..." 
"' + clientspec._root + '/..."') self.localmap = P4.Map.join(self.clientmap, ctr) self.depotmap = self.localmap.reverse() def p4cmd(self, *args, **kwargs): "Execute p4 cmd while logging arguments and results" self.logger.debug(self.p4id, args) output = self.p4.run(args, **kwargs) self.logger.debug(self.p4id, output) self.checkWarnings() return output def disconnect(self): if self.p4: self.p4.disconnect() def checkWarnings(self): if self.p4 and self.p4.warnings: self.logger.warning('warning result: {}'.format(str(self.p4.warnings))) def resetWorkspace(self): self.p4cmd('sync', '...#none') class TrackedAdd(object): """Record class used in MoveTracker""" def __init__(self, chRev, deleteDepotFile): self.chRev = chRev self.deleteDepotFile = deleteDepotFile class MoveTracker(object): """Tracks move/add and move/delete and handles orphans where source or target isn't mapped (either outside source workspace or not visible due to permissions).""" def __init__(self, logger): self.logger = logger self.adds = {} # Key is localFile for matching delete self.deletes = {} # Key is localFile for delete def trackAdd(self, chRev, deleteDepotFile, deleteLocalFile): "Remember the move/add or move/delete so we can match them up" self.adds[deleteLocalFile] = TrackedAdd(chRev, deleteDepotFile) def trackDelete(self, chRev, localFile): "Track a move/delete" self.deletes[localFile] = chRev def getMoves(self): "Return orphaned moves, or the move/add from add/delete pairs" for localFile in self.adds: if localFile in self.deletes: self.logger.debug("Matched move add/delete '%s'" % localFile) del self.deletes[localFile] else: self.logger.debug("Action move/add changed to add") self.adds[localFile].chRev.action = "add" results = [self.adds[k].chRev for k in self.adds] for k in self.deletes: self.logger.debug("Action move/delete changed to delete") self.deletes[k].action = 'delete' results.extend([self.deletes[k] for k in self.deletes]) return results class P4Source(P4Base): "Functionality for reading from source Perforce repository" def __init__(self, section, options): super(P4Source, self).__init__(section, options, 'src') def missingChanges(self, counter): revRange = '...@{rev},#head'.format(rev=counter + 1) self.logger.debug('reading changes: ', revRange) changes = self.p4cmd('changes', '-l', revRange) self.logger.debug('found %d changes' % len(changes)) changes.reverse() if self.options.change_batch_size: changes = changes[:self.options.change_batch_size] if self.options.maximum: changes = changes[:self.options.maximum] self.logger.debug('processing %d changes' % len(changes)) return changes def getChange(self, change): """Expects change number as a string""" class SyncOutput(P4.OutputHandler): "Log sync progress" def __init__(self, progress): P4.OutputHandler.__init__(self) self.progress = progress def outputStat(self, stat): if 'fileSize' in stat: self.progress.ReportFileSync(int(stat['fileSize'])) return P4.OutputHandler.HANDLED def outputInfo(self, info): return P4.OutputHandler.HANDLED def outputMessage(self, msg): return P4.OutputHandler.HANDLED self.progress.ReportChangeSync() callback = SyncOutput(self.progress) self.p4cmd('sync', '...@{},{}'.format(change, change), handler=callback) change = self.p4cmd('describe', change)[0] filerevs = [] filesToLog = {} movetracker = MoveTracker(self.logger) for (n, rev) in enumerate(change['rev']): localFile = self.localmap.translate(change['depotFile'][n]) if localFile and len(localFile) > 0: chRev = ChangeRevision(rev, change, n) chRev.setLocalFile(localFile) if 
chRev.action in ('branch', 'integrate', 'add', 'delete', 'move/add'): filesToLog[chRev.depotFile] = chRev elif chRev.action == 'move/delete': movetracker.trackDelete(chRev, localFile) else: filerevs.append(chRev) fpaths = ['{}#{}'.format(x.depotFile, x.rev) for x in filesToLog.values()] if fpaths: filelogs = self.p4.run_filelog('-m1', *fpaths) if filelogs: self.logger.debug('filelogs:', filelogs) for flog in filelogs: if flog.depotFile in filesToLog: chRev = filesToLog[flog.depotFile] revision = flog.revisions[0] if len(revision.integrations) > 0: for integ in revision.integrations: if 'from' in integ.how or integ.how == "ignored": integ.localFile = self.localmap.translate(integ.file) chRev.addIntegrationInfo(integ) if chRev.action == 'move/add': self.logger.debug('integration', chRev.getIntegration()) movetracker.trackAdd(chRev, flog.depotFile, chRev.getIntegration().localFile) else: filerevs.append(chRev) else: self.logger.error(u"Failed to retrieve filelog for {}#{}".format(flog.depotFile, flog.rev)) filerevs.extend(movetracker.getMoves()) return filerevs class P4Target(P4Base): "Functionality for transferring changes to target Perforce repository" def __init__(self, section, options, src): super(P4Target, self).__init__(section, options, 'targ') self.src = src self.resolveDeleteEncountered = False self.re_cant_integ_without_Di = re.compile("can't integrate from .* or use -Di to disregard move") self.re_cant_integ_without_d = re.compile("can't delete from .* without -d or -Ds flag") self.re_cant_integ_without_i = re.compile(r" can't integrate .* without -i flag") self.re_cant_branch_without_Dt = re.compile(r" can't branch from .* without -d or -Dt flag") self.re_resolve_skipped = re.compile(" \- resolve skipped.") self.re_resolve_tampered = re.compile(" tampered with before resolve - edit or revert") self.re_edit_of_deleted_file = re.compile(r"warning: edit of deleted file") self.re_all_revisions_already_integrated = re.compile(" all revision\(s\) already integrated") self.re_delete_not_on_client = re.compile("\- file\(s\) not on client") self.re_move_delete_needs_move_add = re.compile("move/delete\(s\) must be integrated along with matching move/add\(s\)") self.re_file_remapped = re.compile(" \(remapped from ") self.filesToIgnore = [] def formatChangeDescription(self, **kwargs): """Format using specified format options - see call in replicateChange""" format = self.options.change_description_format format = format.replace("\\n", "\n") t = Template(format) result = t.safe_substitute(**kwargs) return result def processChangeRevs(self, filerevs): "Process all revisions in the change" for f in filerevs: self.logger.debug('targ:', f) if not self.options.preview: if f.action == 'edit': self.logger.debug('processing:0010 edit') self.p4cmd('sync', '-k', f.localFile) self.p4cmd('edit', '-t', f.type, f.localFile) elif f.action == 'add' or f.action == 'import': if f.hasIntegrations(): self.replicateBranch(f, dirty=True) else: self.logger.debug('processing:0020 add') self.p4cmd('add', '-ft', f.type, f.fixedLocalFile) elif f.action == 'delete': if f.hasIntegrations(): self.replicateIntegration(f) else: self.logger.debug('processing:0030 delete') self.replicateDelete(f) elif f.action == 'purge': # special case. 
Type of file is +S, and source.sync removed the file # create a temporary file, it will be overwritten again later self.logger.debug('processing:0040 purge') writeContents(f.fixedLocalFile, 'purged file') self.p4cmd('sync', '-k', f.localFile) self.p4cmd('edit', '-t', f.type, f.localFile) if self.p4.warnings: self.p4cmd('add', '-ft', f.type, f.fixedLocalFile) elif f.action == 'branch': self.replicateBranch(f, dirty=False) elif f.action == 'integrate': self.replicateIntegration(f) elif f.action == 'move/add': self.moveAdd(f) elif f.action == 'archive': self.logger.warning("Ignoring archived revision: %s#%s" % (f.depotFile, f.rev)) self.filesToIgnore.append(f.localFile) else: raise P4TLogicException('Unknown action: %s for %s' % (f.action, str(f))) def replicateChange(self, filerevs, change, sourcePort): """This is the heart of it all. Replicate all changes according to their description""" self.renameOfDeletedFileEncountered = False self.resolveDeleteEncountered = False self.filesToIgnore = [] self.processChangeRevs(filerevs) newChangeId = None opened = self.p4cmd('opened') if len(opened) > 0: description = self.formatChangeDescription(sourceDescription=change['desc'], sourceChange=change['change'], sourcePort=sourcePort, sourceUser=change['user']) if self.options.nokeywords: self.removeKeywords(opened) result = None try: result = self.p4cmd('submit', '-d', description) except P4.P4Exception as e: re_resubmit = re.compile("Out of date files must be resolved or reverted.\n.*p4 submit -c ([0-9]+)") m = re_resubmit.search(self.p4.errors[0]) if m and (self.renameOfDeletedFileEncountered or self.resolveDeleteEncountered): self.p4cmd("sync") result = self.p4cmd("submit", "-c", m.group(1)) else: raise e # the submit information can be followed by refreshFile lines # need to go backwards to find submittedChange a = -1 while 'submittedChange' not in result[a]: a -= 1 newChangeId = result[a]['submittedChange'] self.updateChange(change, newChangeId) self.reverifyRevisions(result) self.logger.info("source = {} : target = {}".format(change['change'], newChangeId)) self.validateSubmittedChange(newChangeId, filerevs) return newChangeId def validateSubmittedChange(self, newChangeId, srcFileRevs): "Check against what was passed in" movetracker = MoveTracker(self.logger) targFileRevs = [] filesToLog = {} if newChangeId: change = self.p4cmd('describe', newChangeId)[0] for (n, rev) in enumerate(change['rev']): localFile = self.localmap.translate(change['depotFile'][n]) if localFile and len(localFile) > 0: chRev = ChangeRevision(rev, change, n) chRev.setLocalFile(localFile) if chRev.action == 'move/add': filesToLog[chRev.depotFile] = chRev elif chRev.action == 'move/delete': movetracker.trackDelete(chRev, localFile) else: targFileRevs.append(chRev) fpaths = ['{}#{}'.format(x.depotFile, x.rev) for x in filesToLog.values()] if fpaths: filelogs = self.p4.run_filelog('-m1', *fpaths) if filelogs: self.logger.debug('filelogs:', filelogs) for flog in filelogs: chRev = filesToLog[flog.depotFile] revision = flog.revisions[0] if len(revision.integrations) > 0: for integ in revision.integrations: if 'from' in integ.how or integ.how == "ignored": integ.localFile = self.localmap.translate(integ.file) chRev.addIntegrationInfo(integ) break movetracker.trackAdd(chRev, flog.depotFile, chRev.getIntegration().localFile) cc = ChangelistComparer() targFileRevs.extend(movetracker.getMoves()) result = cc.listsEqual(srcFileRevs, targFileRevs, self.filesToIgnore) if not result[0]: raise P4TLogicException(result[1]) def moveAdd(self, 
file): "Either paired with a move/delete or an orphaned move/add" self.logger.debug('processing:0100 move/add') if file.getIntegration() and file.getIntegration().localFile: source = file.getIntegration().localFile self.p4cmd('sync', '-f', file.localIntegSyncSource()) output = self.p4cmd('edit', source) if len(output) > 1 and self.re_edit_of_deleted_file.search(output[-1]): self.renameOfDeletedFileEncountered = True if os.path.exists(file.fixedLocalFile): self.p4cmd('move', '-k', source, file.localFile) else: self.p4cmd('move', source, file.localFile) if fileContentChanged(file): self.logger.warning('Resyncing source due to file content changes') self.src.p4cmd('sync', '-f', file.localFileRev()) else: self.p4cmd('add', '-ft', file.type, file.fixedLocalFile) def updateChange(self, change, newChangeId): # need to update the user and time stamp newChange = self.p4.fetch_change(newChangeId) newChange._user = change['user'] # date in change is in epoch time, we need it in canonical form newDate = datetime.utcfromtimestamp(int(change['time'])).strftime("%Y/%m/%d %H:%M:%S") newChange._date = newDate self.p4.save_change(newChange, '-f') def reverifyRevisions(self, result): revisionsToVerify = ["{file}#{rev},{rev}".format(file=x['refreshFile'], rev=x['refreshRev']) for x in result if 'refreshFile' in x] if revisionsToVerify: self.p4cmd('verify', '-qv', revisionsToVerify) def removeKeywords(self, opened): for openFile in opened: if self.hasKeyword(openFile["type"]): fileType = self.removeKeyword(openFile["type"]) self.p4cmd('reopen', '-t', fileType, openFile["depotFile"]) self.logger.debug("targ: Changed type from {} to {} for {}". format(openFile["type"], fileType, openFile["depotFile"])) KTEXT = re.compile("(.*)\+([^k]*)k([^k]*)") def hasKeyword(self, fileType): return(fileType in ["ktext", "kxtext"] or self.KTEXT.match(fileType)) def removeKeyword(self, fileType): if fileType == "ktext": newType = "text" elif fileType == "kxtext": newType = "xtext" else: m = self.KTEXT.match(fileType) newType = m.group(1) + "+" + m.group(2) + m.group(3) return newType def replicateDelete(self, file): """Deletes are generally easy but the edge case is a delete on top of a delete""" self.p4cmd('delete', '-v', file.localFile) if not self.p4.warnings or not self.re_delete_not_on_client.search("\n".join(self.p4.warnings)): return self.p4cmd('sync', '-k', "%s#1" % file.localFile) self.p4cmd('delete', '-v', file.localFile) self.resolveDeleteEncountered = True def replicateBranch(self, file, dirty=False): # An integration where source has been obliterated will not have integrations self.logger.debug('replicateBranch') if not self.options.ignore and file.hasIntegrations() and file.getIntegration().localFile: afterAdd = False for ind, integ in file.integrations(): # With the above in reverse order, we expect any add to occur first if file.getIntegration(ind).how == 'add from': self.logger.debug('processing:0200 add from') afterAdd = True if (file.getIntegration(ind).localFile == file.localFile) or \ (file.numIntegrations() > 1): self.doIntegrate(file.localIntegSource(ind), file.localFile) self.p4cmd('add', '-ft', file.type, file.fixedLocalFile) else: # "add from" is rather an odd beast - recreate as move after back out of delete self.p4cmd('sync', file.localIntegSyncSource(ind)) self.p4cmd('add', file.getIntegration(ind).localFile) os.chmod(file.fixedLocalFile, stat.S_IWRITE) os.remove(file.fixedLocalFile) self.p4cmd('move', file.getIntegration(ind).localFile, file.fixedLocalFile) if fileContentChanged(file): 
self.src.p4cmd('sync', '-f', file.localFileRev()) elif afterAdd: self.logger.debug('processing:0210 other integrates') self.replicateIntegration(file, afterAdd=afterAdd, startInd=ind) else: self.logger.debug('processing:0220 integrate') if ind > 0: flags = [] else: flags = ['-v'] outputDict = self.doIntegrate(file.localIntegSource(ind), file.localFile, flags=flags) edited = False added = False afterAdd = True # This will fire further integrations if outputDict and 'action' in outputDict and outputDict['action'] == 'delete': self.p4cmd('resolve', '-at', file.localFile) self.p4cmd('add', file.localFile) edited = True added = True if dirty and not edited: self.p4cmd('edit', file.localFile) edited = True # Only if last integration to be processed for this rev and it is an add if added or (ind == 0 and outputDict and outputDict['action'] == 'branch' and \ self.integrateContentsChanged(file)): if not edited: self.p4cmd('edit', file.localFile) self.src.p4cmd('sync', '-f', file.localFileRev()) else: self.logger.debug('processing:0230 add') self.p4cmd('add', '-ft', file.type, file.fixedLocalFile) if fileContentChanged(file): self.logger.warning('Resyncing add due to file content changes') self.src.p4cmd('sync', '-f', file.localFileRev()) def integrateContentsChanged(self, file): "Is the source of integrated different to target" fileSize, digest = 0, "" if fileContentComparisonPossible(file.type): filelog = self.src.p4cmd('filelog', '-m1', file.integSyncSource()) if filelog and 'digest' in filelog[0] and 'fileSize' in filelog[0]: fileSize = filelog[0]['fileSize'][0] digest = filelog[0]['digest'][0] return (fileSize, digest) != (file.fileSize, file.digest) else: return False return True def editFrom(self, file, contents, afterAdd=False): "Run merge with edit - if required" if not afterAdd: self.p4cmd('sync', '-f', file.localFile) # to avoid tamper checking self.doIntegrate(file.localIntegSource(), file.localFile) class MyResolver(P4.Resolver): "Local resolver to accept edits on merge" def __init__(self, contents, ftype): self.contents = contents self.ftype = ftype def resolve(self, mergeData): writeContents(mergeData.result_path, self.contents) return 'ae' self.p4.run_resolve(resolver=MyResolver(contents, file.type)) self.logger.debug('resolve -ae') def integrateWithFlags(self, srcname, destname, flags): "integrate which can be repeated with flags where necessary" resultStr = "" resultDict = {} try: cmd = ["integrate"] cmd.extend(flags) cmd.append(srcname) cmd.append(destname) output = self.p4cmd(cmd) if output: if isinstance(output[0], dict): resultDict = output[0] else: resultStr = output[0] except P4.P4Exception: resultStr += "\n".join(self.p4.errors) resultStr += "\n".join(self.p4.warnings) return (resultDict, resultStr) def doIntegrate(self, srcname, destname, flags=None): "Perform integrate" if flags is None: flags = [] outputDict = {} while 1 == 1: outputDict, outputStr = self.integrateWithFlags(srcname, destname, flags) if self.re_cant_integ_without_i.search(outputStr) and "-i" not in flags: flags.append("-i") elif self.re_cant_integ_without_d.search(outputStr) and "-d" not in flags: flags.append("-d") elif self.re_all_revisions_already_integrated.search(outputStr) and "-f" not in flags: flags.append("-f") elif self.re_cant_integ_without_Di.search(outputStr) and "-Di" not in flags: flags.append('-Di') elif self.re_cant_branch_without_Dt.search(outputStr) and "-Dt" not in flags: flags.append('-Dt') elif self.re_file_remapped.search(outputStr) and "-2" not in flags: flags.append("-2") elif 
self.re_all_revisions_already_integrated.search(outputStr) and "-f" in flags: # Can't integrate a delete on to a delete self.logger.warning("Ignoring integrate:", destname) self.filesToIgnore.append(destname) break else: break return outputDict def integrateDelete(self, srcFile, srcInd, destname): "Handles all deletes" self.logger.debug('processing:0260 delete') flags = [] doCopy = False while 1 == 1: outputDict, outputStr = self.integrateWithFlags(srcFile.localIntegSource(srcInd), destname, flags) if self.re_cant_integ_without_d.search(outputStr) and "-d" not in flags: flags.append("-d") elif self.re_cant_integ_without_Di.search(outputStr) and "-Di" not in flags: flags.append('-Di') elif self.re_cant_branch_without_Dt.search(outputStr) and "-Dt" not in flags: flags.append('-Dt') elif self.re_all_revisions_already_integrated.search(outputStr) and "-f" not in flags: flags.append("-f") elif self.re_move_delete_needs_move_add.search(outputStr): doCopy = True break else: break if doCopy: self.logger.debug('processing:0270 copy of delete') self.p4cmd('copy', srcFile.localIntegSyncSource(srcInd), destname) else: self.logger.debug('processing:0280 delete') self.p4cmd('resolve', '-at') def replicateIntegration(self, file, afterAdd=False, startInd=None): self.logger.debug('replicateIntegration') if not self.options.ignore and file.hasIntegrations() and file.getIntegration().localFile: if startInd is None: startInd = file.numIntegrations() for ind, integ in file.integrations(): if ind > startInd: continue if integ.how == 'add from': assert(afterAdd) continue # We ignore these self.logger.debug('processing:0300 integ:', integ.how) if integ.how == 'edit from': self.logger.debug('processing:0305 edit from') if afterAdd: # Resync source version and then do resolve -ae self.src.p4cmd('sync', '-f', file.localFileRev()) contents = readContents(file.fixedLocalFile) self.editFrom(file, contents, afterAdd=afterAdd) elif integ.how in ('delete', 'delete from'): self.integrateDelete(file, ind, file.localFile) else: self.logger.debug('processing:0310 integrate') editedFrom = False if not afterAdd and file.action != 'delete': self.p4cmd('sync', '-f', file.localFile) # to avoid tamper checking flags = [] if file.action == 'delete' and integ.how == 'ignored': flags = ['-Rb'] self.doIntegrate(file.localIntegSource(ind), file.localFile, flags) if integ.how == 'copy from': self.logger.debug('processing:0320 copy from') self.p4cmd('resolve', '-at') if fileContentChanged(file) and not editedFrom: self.logger.warning('File copied but content changed') self.src.p4cmd('sync', '-f', file.localFileRev()) elif integ.how == 'ignored': self.logger.debug('processing:0330 ignored') self.p4cmd('resolve', '-ay') if file.action != 'delete' and fileContentChanged(file) and ind == 0: # Strange but possible in older servers - but only handled for last integrate # in the bunch, hence ind==0 self.p4cmd('edit', file.localFile) self.src.p4cmd('sync', '-f', file.localFileRev()) elif integ.how == 'merge from': self.logger.debug('processing:0350 merge from') resolve_result = "" try: output = self.p4cmd('resolve', '-am') resolve_result = output[-1] except P4.P4Exception: resolve_result += "\n".join(self.p4.warnings) resolve_result += "\n".join(self.p4.errors) if self.re_resolve_skipped.search(str(resolve_result)): self.logger.warning('Merge from downgraded to edit from due to resolve problems') # Resync source version and then do resolve -ae self.src.p4cmd('sync', '-f', file.localFileRev()) contents = readContents(file.fixedLocalFile) if not 
afterAdd: self.p4cmd('revert', file.localFile) self.editFrom(file, contents, afterAdd=afterAdd) editedFrom = True elif self.re_resolve_tampered.search(str(resolve_result)): self.p4cmd('edit', file.localFile) self.src.p4cmd('sync', '-f', file.localFileRev()) else: self.logger.error('Unexpected resolve error: %s' % resolve_result) # Validate filesize and md5 digest if fileContentChanged(file) and not editedFrom: self.logger.warning('Merge from downgraded to edit from due to file content changes') # Resync source version and then do resolve -ae self.src.p4cmd('sync', '-f', file.localFileRev()) contents = readContents(file.fixedLocalFile) if not afterAdd: self.p4cmd('revert', file.localFile) self.editFrom(file, contents) else: self.logger.error('Cannot deal with {}'.format(integ)) else: self.logger.debug('processing:0360 ignore integrations') if file.hasIntegrations() and file.getIntegration().how in ('delete', 'delete from'): self.logger.debug('processing:0370 delete') self.p4cmd('delete', '-v', file.localFile) elif file.action == 'delete' and file.hasIntegrations() and not file.getIntegration().localFile: # We don't attempt to transfer files with a delete revision done by an integration # that is not being transferred. self.logger.warning("Ignoring deleted revision: %s#%s" % (file.depotFile, file.rev)) self.filesToIgnore.append(file.localFile) else: self.logger.debug('processing:0380 else') self.p4cmd('sync', '-k', file.localFile) self.p4cmd('edit', file.localFile) if fileContentChanged(file): self.src.p4cmd('sync', '-f', file.localFileRev()) def getCounter(self): "Returns value of counter as integer" result = self.p4cmd('counter', self.options.counter_name) if result and 'counter' in result[0]: return int(result[0]['value']) return 0 def setCounter(self, value): "Set's the counter to specified value" self.p4cmd('counter', self.options.counter_name, str(value)) def initChangeMapFile(self): "Initializes the file - once" if not self.options.change_map_file: return fpath = os.path.join(self.root, self.options.change_map_file) depotFiles = self.p4cmd('fstat', fpath) createFile = False if depotFiles: if not os.path.exists(fpath): self.p4cmd('sync', fpath) else: createFile = True if createFile: with open(fpath, "a") as fh: fh.write("sourceP4Port,sourceChangeNo,targetChangeNo\n") def updateChangeMap(self, sourceP4Port, sourceChangeNo, targetChangeNo): "Store values" if not self.options.change_map_file: return fpath = os.path.join(self.root, self.options.change_map_file) if os.path.exists(fpath): os.chmod(fpath, stat.S_IWRITE) with open(fpath, "a") as fh: fh.write("%s,%s,%s\n" % (sourceP4Port, sourceChangeNo, targetChangeNo)) def submitChangeMap(self): if not self.options.change_map_file: return self.logger.debug("Submitting change_map_file") chg = self.p4.fetch_change() fpath = os.path.join(self.root, self.options.change_map_file) chg['Description'] = "Updated change_map_file" output = self.p4.save_change(chg)[0] m = re.search("Change (\d+) created", output) if not m: raise P4TException("Failed to create changelist") chgno = m.group(1) output = self.p4cmd('reconcile', '-c', chgno, fpath)[0] if output['action'] == 'add': self.p4cmd('reopen', '-t', 'text+CS32', fpath) self.p4cmd('submit', '-c', chgno) class P4Transfer(object): "Main transfer class" def __init__(self, *args): parser = argparse.ArgumentParser( description="P4Transfer", epilog="Copyright (C) 2012-14 Sven Erik Knop/Robert Cowham, Perforce Software Ltd" ) parser.add_argument('-n', '--preview', action='store_true', help="Preview only, no 
transfer") parser.add_argument('-c', '--config', default=CONFIG, help="Default is " + CONFIG) parser.add_argument('-m', '--maximum', default=None, type=int, help="Maximum number of changes to transfer") parser.add_argument('-k', '--nokeywords', action='store_true', help="Do not expand keywords and remove +k from filetype") parser.add_argument('-p', '--preflight', action='store_true', help="Run a sanity check first to ensure target is empty") parser.add_argument('-r', '--repeat', action='store_true', help="Repeat transfer in a loop - for continuous transfer") parser.add_argument('-s', '--stoponerror', action='store_true', help="Stop on any error even if --repeat has been specified") parser.add_argument('--sample-config', action='store_true', help="Print an example config file and exit") parser.add_argument('-i', '--ignore', action='store_true', help="Treat integrations as adds and edits") self.options = parser.parse_args(list(args)) self.options.sync_progress_size_interval = None self.logger = logutils.getLogger(LOGGER_NAME) self.previous_target_change_counter = 0 # Current value def getOption(self, section, option_name, default=None): result = default try: result = self.parser.get(section, option_name) except: pass return result def getIntOption(self, section, option_name, default=None): result = default strval = self.getOption(section, option_name, default) if strval: try: result = int(eval(strval)) except: pass return result def readConfig(self): self.parser = ConfigParser() self.options.parser = self.parser # for later use try: with open(self.options.config) as f: if python3: self.parser.read_file(f) else: self.parser.readfp(f) except Exception as e: raise P4TConfigException('Could not read %s: %s' % (self.options.config, str(e))) self.options.counter_name = self.getOption(GENERAL_SECTION, "counter_name") if not self.options.counter_name: raise P4TConfigException("Option counter_name in the [general] section must be specified") self.options.instance_name = self.getOption(GENERAL_SECTION, "instance_name", self.options.counter_name) self.options.mail_form_url = self.getOption(GENERAL_SECTION, "mail_form_url") self.options.mail_to = self.getOption(GENERAL_SECTION, "mail_to") self.options.mail_from = self.getOption(GENERAL_SECTION, "mail_from") self.options.mail_server = self.getOption(GENERAL_SECTION, "mail_server") self.options.sleep_on_error_interval = self.getIntOption(GENERAL_SECTION, "sleep_on_error_interval", 60) self.options.poll_interval = self.getIntOption(GENERAL_SECTION, "poll_interval", 60) self.options.change_batch_size = self.getIntOption(GENERAL_SECTION, "change_batch_size", 20000) self.options.report_interval = self.getIntOption(GENERAL_SECTION, "report_interval", 30) self.options.error_report_interval = self.getIntOption(GENERAL_SECTION, "error_report_interval", 30) self.options.summary_report_interval = self.getIntOption(GENERAL_SECTION, "summary_report_interval", 10080) self.options.sync_progress_size_interval = self.getIntOption(GENERAL_SECTION, "sync_progress_size_interval") self.options.change_description_format = self.getOption(GENERAL_SECTION, "change_description_format", "$sourceDescription\n\nTransferred from p4://$sourcePort@$sourceChange") self.options.change_map_file = self.getOption(GENERAL_SECTION, "change_map_file", "") self.source = P4Source(SOURCE_SECTION, self.options) self.target = P4Target(TARGET_SECTION, self.options, self.source) self.readSection(self.source) self.readSection(self.target) def readSection(self, p4config): if 
self.parser.has_section(p4config.section): self.readOptions(p4config) else: raise P4TConfigException('Config file needs section %s' % p4config.section) def readOptions(self, p4config): self.readOption('P4CLIENT', p4config) self.readOption('P4USER', p4config) self.readOption('P4PORT', p4config) self.readOption('P4PASSWD', p4config, optional=True) def readOption(self, option, p4config, optional=False): if self.parser.has_option(p4config.section, option): p4config.__dict__[option] = self.parser.get(p4config.section, option) elif not optional: raise P4TConfigException('Required option %s not found in section %s' % (option, p4config.section)) def replicate_changes(self): "Perform a replication loop" self.source.connect('source replicate') self.target.connect('target replicate') changes = self.source.missingChanges(self.target.getCounter()) self.logger.info("Transferring %d changes" % len(changes)) if len(changes) > 0: self.target.initChangeMapFile() self.save_previous_target_change_counter() self.source.progress = ReportProgress(self.source.p4, changes, self.logger, self.source.P4CLIENT) self.source.progress.SetSyncProgressSizeInterval(self.options.sync_progress_size_interval) for change in changes: msg = 'Processing change: {} "{}"'.format(change['change'], change['desc'].strip()) self.logger.info(msg) filerevs = self.source.getChange(change['change']) targetChange = self.target.replicateChange(filerevs, change, self.source.p4.port) self.target.setCounter(change['change']) self.target.updateChangeMap(self.source.p4.port, change['change'], targetChange) # Tidy up the workspaces after successful transfer self.source.p4cmd("sync", "...#0") with self.target.p4.at_exception_level(P4.P4.RAISE_NONE): self.target.p4cmd('sync', "...#0") self.target.submitChangeMap() self.source.disconnect() self.target.disconnect() return len(changes) def log_exception(self, e): "Log exceptions appropriately" etext = str(e) if re.search("WSAETIMEDOUT", etext, re.MULTILINE) or re.search("WSAECONNREFUSED", etext, re.MULTILINE): self.logger.error(etext) else: self.logger.exception(e) def save_previous_target_change_counter(self): "Save the latest change transferred to the target" chg = self.target.p4cmd('changes', '-m1', '-ssubmitted', '//{client}/...'.format(client=self.target.P4CLIENT)) if chg: self.previous_target_change_counter = int(chg[0]['change']) + 1 def send_summary_email(self, time_last_summary_sent, change_last_summary_sent): "Send an email summarising changes transferred" time_str = p4time(time_last_summary_sent) self.target.connect('target replicate') # Combine changes reported by time or since last changelist transferred changes = self.target.p4cmd('changes', '-l', '//{client}/...@{rev},#head'.format( client=self.target.P4CLIENT, rev=time_str)) chgnums = [chg['change'] for chg in changes] counter_changes = self.target.p4cmd('changes', '-l', '//{client}/...@{rev},#head'.format( client=self.target.P4CLIENT, rev=change_last_summary_sent)) for chg in counter_changes: if chg['change'] not in chgnums: changes.append(chg) changes.reverse() lines = [] lines.append(["Date", "Time", "Changelist", "File Revisions", "Size (bytes)", "Size"]) total_changes = 0 total_rev_count = 0 total_file_sizes = 0 for chg in changes: sizes = self.target.p4cmd('sizes', '-s', '//%s/...@%s,%s' % (self.target.P4CLIENT, chg['change'], chg['change'])) lines.append([time.strftime("%Y/%m/%d", time.localtime(int(chg['time']))), time.strftime("%H:%M:%S", time.localtime(int(chg['time']))), chg['change'], sizes[0]['fileCount'], 
sizes[0]['fileSize'], fmtsize(int(sizes[0]['fileSize']))]) total_changes += 1 total_rev_count += int(sizes[0]['fileCount']) total_file_sizes += int(sizes[0]['fileSize']) lines.append([]) lines.append(['Totals', '', str(total_changes), str(total_rev_count), str(total_file_sizes), fmtsize(total_file_sizes)]) report = "Changes transferred since %s\n%s" % (time_str, "\n".join(["\t".join(line) for line in lines])) self.logger.debug("Transfer summary report:\n%s" % report) self.logger.info("Sending Transfer summary report") self.logger.notify("Transfer summary report", report, include_output=False) self.save_previous_target_change_counter() self.target.disconnect() def runPreflightCheck(self): "Validates target files are not present - valid for first run only" print("Running pre-flight check first ...") targetFiles = self.target.p4cmd('fstat', '-T clientFile', '...') sourceFiles = self.source.p4cmd('fstat', '-T clientFile', '...') for f in targetFiles: if f in sourceFiles: depotFile = self.target.p4cmd('fstat', f['clientFile'])[0] raise P4TConfigException("Failed pre-flight check, file '{}' in source and target".format(depotFile['depotFile'])) print("Finished pre-flight check ...") def validateClientWorkspaces(self): if not self.source.root == self.target.root: raise P4TConfigException('source and target server workspace root directories must be the same') src = set([m.replace("//%s/" % self.source.P4CLIENT, "") for m in self.source.clientmap.rhs()]) targ = set([m.replace("//%s/" % self.target.P4CLIENT, "") for m in self.target.clientmap.rhs()]) diffs = src.difference(targ) if diffs: raise P4TConfigException("Configuration failure: workspace mappings have different right hand sides: %s" % ", ".join([str(r) for r in diffs])) if self.source.clientspec["LineEnd"] != "unix" or self.target.clientspec["LineEnd"] != "unix": raise P4TConfigException("Source and target workspaces must have LineEnd set to 'unix'") def setupReplicate(self): "Read config file and setup" self.readConfig() self.source.connect('source replicate') self.target.connect('target replicate') self.logger.debug("connected to source and target") sourceTargetTextComparison.setup(self.source, self.target) self.validateClientWorkspaces() def writeLogHeader(self): "Write header info to log" logOnce(self.logger, VERSION) logOnce(self.logger, "Python ver: 0x%08x, OS: %s" % (sys.hexversion, sys.platform)) logOnce(self.logger, "P4Python ver: %s" % (P4.P4.identify())) logOnce(self.logger, "Reading config file") def rotateLogFile(self): "Rotate existing log file" self.logger.info("Rotating logfile") logutils.resetLogger(LOGGER_NAME) global alreadyLogged alreadyLogged = {} self.writeLogHeader() def replicate(self): """Central method that performs the replication between server1 and server2""" if self.options.sample_config: printSampleConfig() return 0 try: self.writeLogHeader() self.setupReplicate() except Exception as e: self.log_exception(e) logging.shutdown() return 1 if self.options.preflight: self.runPreflightCheck() time_last_summary_sent = time.time() change_last_summary_sent = 0 self.logger.debug("Time last summary sent: %s" % p4time(time_last_summary_sent)) time_last_error_occurred = 0 error_encountered = False # Flag to indicate error encountered which may require reporting error_notified = False finished = False while not finished: try: self.readConfig() # Read every time to allow user to change them self.logger.setReportingOptions(instance_name=self.options.instance_name, mail_form_url=self.options.mail_form_url, 
mail_to=self.options.mail_to, mail_from=self.options.mail_from, mail_server=self.options.mail_server, report_interval=self.options.report_interval) logOnce(self.logger, self.source.options) logOnce(self.logger, self.target.options) self.source.disconnect() self.target.disconnect() num_changes = self.replicate_changes() if num_changes > 0: self.logger.info("Transferred %d changes successfully" % num_changes) if change_last_summary_sent == 0: change_last_summary_sent = self.previous_target_change_counter if self.options.change_batch_size and num_changes >= self.options.change_batch_size: self.logger.info("Finished processing batch of %d changes" % self.options.change_batch_size) self.rotateLogFile() elif not self.options.repeat: finished = True else: if error_encountered: self.logger.info("Logging - reset error interval") self.logger.notify("Cleared error", "Previous error has now been cleared") error_encountered = False error_notified = False if time.time() - time_last_summary_sent > self.options.summary_report_interval * 60: time_last_summary_sent = time.time() self.send_summary_email(time_last_summary_sent, change_last_summary_sent) time.sleep(self.options.poll_interval * 60) self.logger.info("Sleeping for %d minutes" % self.options.poll_interval) except P4TException as e: self.log_exception(e) self.logger.notify("Error", "Logic Exception encountered - stopping") logging.shutdown() return 1 except Exception as e: self.log_exception(e) if self.options.stoponerror: self.logger.notify("Error", "Exception encountered and --stoponerror specified") logging.shutdown() return 1 else: # Decide whether to report an error if not error_encountered: error_encountered = True time_last_error_occurred = time.time() elif not error_notified: if time.time() - time_last_error_occurred > self.options.error_report_interval * 60: error_notified = True self.logger.info("Logging - Notifying recurring error") self.logger.notify("Recurring error", "Multiple errors seen") self.logger.info("Sleeping on error for %d minutes" % self.options.sleep_on_error_interval) time.sleep(self.options.sleep_on_error_interval * 60) self.logger.notify("Changes transferred", "Completed successfully") logging.shutdown() return 0 if __name__ == '__main__': result = 0 try: prog = P4Transfer(*sys.argv[1:]) result = prog.replicate() except Exception as e: print(str(e)) result = 1 sys.exit(result)
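# A typical invocation sequence, sketched as a hedged example: the option names below
# (-c/--config, -m/--maximum, --repeat, --sample-config) come from the argparse
# definitions above, while the shell commands and file names are illustrative only.
#
#   python P4Transfer.py --sample-config > transfer.cfg   # generate a config template
#   (edit transfer.cfg: fill in [source]/[target] p4port, p4user, p4client)
#   python P4Transfer.py -c transfer.cfg -m 10            # trial run: at most 10 changes
#   python P4Transfer.py -c transfer.cfg --repeat         # continuous polling transfer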
Change history:

| # | Change | User | Description |
|---|--------|------|-------------|
| #113 | 27669 | Robert Cowham | Retired (deleted) this version of the script and included a reference to its replacement: https://github.com/perforce/p4transfer |
| #112 | 26685 | Robert Cowham | Run 'describe -s' instead of just 'describe' - better performance. |
| #111 | 26350 | Robert Cowham | Handle move/delete where previous rev is obliterated. |
| #110 | 26349 | Robert Cowham | Handle move/add where source obliterated. |
| #109 | 26344 | Robert Cowham | New parameter --end-datetime added to allow unattended runs within a window of opportunity. |
| #108 | 26276 | Robert Cowham | Fix problem with replicating basic undo. |
| #107 | 23271 | Robert Cowham | Allow transfers to be done by non-superusers (only requiring write and review privilege). In non-super mode, changelist date/time and author fields are not updated. |
| #106 | 22794 | Robert Cowham | Allow change_map to be in a subdir. |
| #105 | 22524 | Robert Cowham | Handle missing fields on describe for purged revisions. |
| #104 | 22515 | Robert Cowham | Removed preview option, which was confusing as it wasn't well tested. |
| #103 | 22466 | Robert Cowham | Improved preview option handling - with test. |
| #102 | 22465 | Robert Cowham | Added tests for an r99.2-created depot (no filesize/digest stored). Fixed problem with preview failing comparison. Add --p4d option to tests to use a different p4d version. |
| #101 | 22280 | Robert Cowham | Use old style syntax for submit. |
| #100 | 22278 | Robert Cowham | Fix problem transferring into a 2012.2 server - need to convert types to canonical type. |
| #99 | 19691 | Robert Cowham | Fix problems with selective integrations (cherry picking). Has changed behaviour of a few tests. |
| #98 | 18701 | Robert Cowham | Handle the case of an edit after an archived version. |
| #97 | 16568 | Robert Cowham | Require clobber option in both source/target to avoid sync errors. As a result, need to improve change_map.csv handling. |
| #96 | 16425 | Robert Cowham | Remove preflight option and add in test for maximum option. |
| #95 | 16031 | Robert Cowham | Handle large files when creating md5 checksums of contents. |
| #94 | 15827 | Robert Cowham | Avoid unnecessary resyncs of edited files due to incorrect digest calculation and comparison. Update failing test. |
| #93 | 15585 | Robert Cowham | Additional fix to the previous one for the edge case of different file types (text vs binary). |
| #92 | 15529 | Robert Cowham | Fix problem when integrating a cherry pick with a different filetype. |
| #91 | 14139 | Robert Cowham | Fix strange problem with copy from & branch from to same revision. |
| #90 | 13959 | Robert Cowham | Changed utf16 sync problem handling to raise an exception recommending a retype and linking to KB: http://answers.perforce.com/articles/KB/3117 |
| #89 | 13929 | Robert Cowham | Oops - rushed previous fix - all tests pass now. |
| #88 | 13921 | Robert Cowham | Handle unsyncable utf16 files: note the sync errors; add them to an ignore list; if an edit fails and the previous version was utf16, convert it to an add. |
| #87 | 13892 | Robert Cowham | Extra check to resync if edit fails. No test (repro) as yet - still investigating. |
| #86 | 13858 | Robert Cowham | Log progress of sync calls, and in particular output warning messages. |
| #85 | 13704 | Robert Cowham | Improve explanation of the --sample-config option. Make --sample-config work when piping stdout to a file (for python2 at least!). |
| #84 | 13576 | Robert Cowham | Fix for when filetypes are not integrated on purpose. |
| #83 | 12944 | Robert Cowham | Handle symlinks on Unix when run with Python3 (unicode). |
| #82 | 12941 | Robert Cowham | Make sure filetype changes are propagated via integrate even when integ.engine=2. Fix a couple of tests failing on OS X. |
| #81 | 12570 | Robert Cowham | Fix link handling problem. |
| #80 | 12549 | Robert Cowham | Handle symlinks (on Unix at least). |
| #79 | 12548 | Robert Cowham | Optimize slightly by checking the whole file first before doing a line by line search for RCS keywords. |
| #78 | 12547 | Robert Cowham | Handle RCS keyword files which have been merged in a dodgy manner (what should be a dirty merge pretending to be a clean merge). |
| #77 | 12532 | Robert Cowham | Handle edit failing when doing old style rename. |
| #76 | 12524 | Robert Cowham | Handle integrating a delete on top of a delete. |
| #75 | 12495 | Robert Cowham | Fix problem of missing move/add records where the source was outside the transfer workspace. |
| #74 | 12489 | Robert Cowham | Handle multiple overlapping integrates with a strange resolve -ay. |
| #73 | 12472 | Robert Cowham | Steps along the way to testing a move from outside. |
| #72 | 12409 | Robert Cowham | Handle utf16 with faulty BOM. |
| #71 | 12358 | Robert Cowham | Put in a pointer to form_mail just in case it is useful. Possibly fix summarise shell. |
| #70 | 11882 | Robert Cowham | Test for a new revision with 2 integrates into it, including basic copy. Reduce number of forced integrates if there are multiple. |
| #69 | 11881 | Robert Cowham | Handled a multiple integrate including a copy after which the file was modified. |
| #68 | 11863 | Robert Cowham | Use 'unix' instead of 'share' in transfer client line-endings, to avoid issues with MD5 checksums and the like. |
| #67 | 11839 | Robert Cowham | Check for edited files changed. Remove unused parameter from read/write content calls (filetype). Fix problem with purged files being read-only. |
| #66 | 11838 | Robert Cowham | Add better source/target content error output. |
| #65 | 11728 | Robert Cowham | Handle very old logic (pre-tamper protection) where an integrate with resolve -ay could occur and the content of the file be edited in the same change. The result is demoted from an integrate to an edit, but still has the integration record. |
| #64 | 11721 | Robert Cowham | Fix problem with writing purged files where an intervening directory doesn't exist. |
| #63 | 11547 | Robert Cowham | With workspace line endings as share, read/write with binary when looking at contents. |
| #62 | 11518 | Robert Cowham | Handle text filetypes properly with MD5 calculations and line endings. Require source and target client workspaces to have LineEnd='share' (with test). |
| #61 | 11478 | Robert Cowham | Add the ability to batch changes up (default 20,000). Includes tests and adjustments to logging. |
| #60 | 11445 | Robert Cowham | Handle delete integration requiring the force flag. |
| #59 | 11433 | Robert Cowham | Minor speedup - combine p4 filelog calls. |
| #58 | 11432 | Robert Cowham | Minor refactoring. Fixed some pylint warnings. |
| #57 | 11430 | Robert Cowham | Expanded testIntegDeleteProblem and fixed the resulting test failure. |
| #56 | 11425 | Robert Cowham | Handle strange case of 3 separate integrations into a single (new) revision. |
| #55 | 11407 | Robert Cowham | Fix problem where an integrate defaults to resolving an add as a delete and requires a second step to be resolved as an add (with integ.engine=3). |
| #54 | 11374 | Robert Cowham | Fix change formatting - get newlines in there. |
| #53 | 11343 | Robert Cowham | Change parameter from --sample_config to --sample-config. Don't convert things to lowercase - causes confusion with some values. |
| #52 | 11342 | Robert Cowham | Handle renames which were badly propagated via individual integs of the move/add and move/delete using integ.engine=2. |
| #51 | 11305 | Robert Cowham | Handle wildcard chars for purged files. |
| #50 | 11304 | Robert Cowham | Fix files with perforce wildcards in their names when they are branched from outside. |
| #49 | 11301 | Robert Cowham | Previous fix not quite correct in terms of order of if statements! |
| #48 | 11300 | Robert Cowham | Create specific exception classes. Handle an integrate -Rb with ignore which comes from "outside" the source workspace. |
| #47 | 11295 | Robert Cowham | Handle an integrate where the source has been obliterated (fixes an index error). The target action will be an edit. |
| #46 | 11293 | Robert Cowham | Handle integ -Rb which is ignored where the source is not in the source client. Just ignore the revision in this case. |
| #45 | 11275 | Robert Cowham | Handle files with action 'archive' - they have been archived in an archive depot and are ignored by P4Transfer (with a warning in the log file). |
| #44 | 11271 | Robert Cowham | Handle files with action 'import', which means they came from remote depots. This action is changed to an 'add' in the target. |
| #43 | 11267 | Robert Cowham | Properly do change from #39, which removed the -f flag from the add command. |
| #42 | 11266 | Robert Cowham | Ensure that workspaces have a matching right hand side. |
| #41 | 11265 | Robert Cowham | Handle an integrate with ignore to a deleted first revision. Tweak tests to work with Python 2.7/3.3 as regards unittest.assertRegex. |
| #40 | 11259 | Robert Cowham | Check for Python versions 2.7 or 3.3+. Log Python and OS versions. |
| #39 | 11257 | Robert Cowham | Fix handling of purged files (+Sn). |
| #38 | 11237 | Robert Cowham | Added an (optional) change_map file which is appended to after submits. It shows: sourceP4Port,sourceChangeNo,targetChangeNo. Makes for easy searching (offline or via p4 grep) as opposed to looking through changelist descriptions. An illustrative sketch of this format follows this table. |
| #37 | 11234 | Robert Cowham | Implement configurable change descriptions. |
| #36 | 11230 | Robert Cowham | Handle a delete on top of a delete. |
| #35 | 11049 | Robert Cowham | Handled strange 'add from' single integration (which is a backout of an earlier change followed by a move). |
| #34 | 10739 | Robert Cowham | Fix for integrating 2 versions into a single target where the target is revision 1. |
| #33 | 10697 | Robert Cowham | Remove clean command - doesn't perform well. |
| #32 | 10509 | Robert Cowham | Deal with multiple integrates into the same target. |
| #31 | 10505 | Robert Cowham | Ignore files where an integrate -f doesn't produce a result. |
| #30 | 10503 | Robert Cowham | Add -Dt to integrate flags when necessary. |
| #29 | 10502 | Robert Cowham | Handle need to do forced integrates. |
| #28 | 10498 | Robert Cowham | Handle -d/-Ds flag warning. Make sizes on source client specific - proper fix to previous change! Clean target workspace. |
| #27 | 10496 | Robert Cowham | Test for failed submit. When running sizes command, restrict to workspace view for performance (avoid spec depot!). |
| #26 | 10484 | Robert Cowham | Test for file content changed when adding a file. Minor refactor for the above. Added testOutsideInsideDirtyCopy. |
| #25 | 10483 | Robert Cowham | Minor logging tweaks. |
| #24 | 10482 | Robert Cowham | Log sync command. |
| #23 | 10479 | Robert Cowham | Resync after move/add if necessary due to file content changes. |
| #22 | 10475 | Robert Cowham | Handle the rename of a deleted file - rather esoteric but possible! |
| #21 | 10472 | Robert Cowham | Sync specified rev, not just head, when syncing for a move. Improve error reporting when a validate error is encountered. |
| #20 | 10471 | Robert Cowham | Refactor - clarify naming around file content comparison. |
| #19 | 10470 | Robert Cowham | When resyncing src files, sync to the specified revision. |
| #18 | 10469 | Robert Cowham | Check file size and digest after a clean merge. Handled dodgy merges - cherry picks where they were edited afterwards. |
| #17 | 10467 | Robert Cowham | Fix problem requiring integ -i (with integ.engine=2). Improved validation after submission. |
| #16 | 10438 | Robert Cowham | Fixed bug where effectively selective integrations were being done on the source. Started better comparison preparation for changelist validation. |
| #15 | 10214 | Robert Cowham | Validate submitted change afterwards. |
| #14 | 10181 | Robert Cowham | Log warnings for every p4cmd. |
| #13 | 10180 | Robert Cowham | Detect tamper check problem on resolve -am. |
| #12 | 10177 | Robert Cowham | Revert file before attempting resolve -ae (editFrom). |
| #11 | 10162 | Robert Cowham | Sync target to avoid tamper check. |
| #10 | 10161 | Robert Cowham | Handle a 'resolve skipped' warning - a likely sign of integration engine differences. |
| #9 | 10147 | Robert Cowham | Fix problem with move/add where there are other integration records on the file that is being added. |
| #8 | 10141 | Robert Cowham | Enhanced logging - added specific ids for different actions. Log a few extra items, including script version and client mappings. Fixed bug regarding the branching of a moved file. |
| #7 | 10134 | Robert Cowham | Fixed problem where the target of a move is outside the client view - converts move/delete to a delete. |
| #6 | 10110 | Robert Cowham | Remove unused vars as per pyflakes warnings. Add some extra debug logging of paths executed. Remove unused code when processing 'add from'. |
| #5 | 10098 | Robert Cowham | Fixed bug when a move (rename) by P4V was backed out via 'revert to revision'. Removed python3 warning. |
| #4 | 9730 | Robert Cowham | Fixed problem handling a changelist containing files both inside and outside the workspace view. |
| #3 | 9641 | Robert Cowham | Latest changes by Robert. Added new options: --repeat for continuous operation; --sample-config to produce a sample config. Improved logging and notification options (via emails if configured). Retries in case of error. |
| #2 | 9473 | Sven Erik Knop | Added the ability to remove +k from the target. Currently tested for add; need to test for edit and integrate as well. Invoked by using option -k or --nokeywords. |
| #1 | 9170 | Sven Erik Knop | Branched PerforceTransfer from private area to perforce_software. This tool will now get back its original name, P4Transfer. |
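To make the change_map row format mentioned in revision #38 above concrete, here is a minimal, hypothetical sketch of appending and searching one mapping row. The values and the depot path are invented, and this is not the code P4Transfer itself uses.

```python
# Hypothetical illustration of the change_map.csv format from revision #38:
# one row per replicated change, in the form sourceP4Port,sourceChangeNo,targetChangeNo
with open("change_map.csv", "a") as f:
    f.write("source-server:1666,12345,678\n")

# Looking up a source change is then a simple text search, e.g.:
#   grep ",12345," change_map.csv
#   p4 grep -e ",12345," //depot/p4transfer/change_map.csv   (depot path is hypothetical)
```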
//guest/sven_erik_knop/P4Pythonlib/scripts/PerforceTransfer.py

| # | Change | User | Description |
|---|--------|------|-------------|
| #17 | 8554 | Sven Erik Knop | Added debug output for failed filelog retrieval. |
| #16 | 8463 | Sven Erik Knop | Fixed a further problem with files that have an illegal file name containing @, #, * or %. Now it is possible to re-edit the file again as well. Added a test case to prove the point. |
| #15 | 8461 | Sven Erik Knop | Fixed adding files with illegal chars like '@'. Also added a test case. |
| #14 | 8432 | Sven Erik Knop | Added pre-flight checks (-p) to avoid overwriting existing files. |
| #13 | 8430 | Sven Erik Knop | Added maximum option for changes to limit the number of changes transferred in each run. Should be useful for testing. Mind that "p4 changes" starts at the latest changes, so if there are millions of changes to transfer it will still take a long time to load all of the changes into memory first. |
| #12 | 8429 | Sven Erik Knop | Added logging. |
| #11 | 8428 | Sven Erik Knop | Transferred changes are now adjusted: the transfer user is replaced with the original user, and the submit date is reset to the original date. Any +k files are re-verified to ensure they have the correct checksum. Still missing: logging. |
| #10 | 8425 | Sven Erik Knop | Make PerforceTransfer unidirectional from source to target. Adjusted test cases accordingly. Still missing: update change user and timestamp to the source user and timestamp; re-verify ktext files affected by the change; add proper logging. |
| #9 | 8232 | Sven Erik Knop | Better safe than sorry: quotes around the path of the localMap entries. |
| #8 | 8231 | Sven Erik Knop | Removed all traces of p4.run_where and replaced them with local map.translate. Hopefully this will improve the performance of PerforceTransfer. |
| #7 | 8216 | Sven Erik Knop | Added test cases for integration from outside transfer scope. Fixed bug for integrated deletes from the outside. |
| #6 | 8215 | Sven Erik Knop | Upgraded test to include merge w/ edit. Fixed a bug in PerforceTransfer.py avoiding a tamper check error. |
| #5 | 8212 | Sven Erik Knop | Added integrate-delete test case. Solved integrate-delete problem in PerforceTransfer. |
| #4 | 8211 | Sven Erik Knop | Additional test cases for integrate. Fixed a bug with "ignore", which can now be replicated. |
| #3 | 8210 | Sven Erik Knop | Fixed a bug in PerforceTransfer where an add followed by an integ to another branch would break the add. Also added the beginning of a test framework to catch those kinds of problems in the future. Currently the test framework only checks add, edit, delete and simple integrates. |
| #2 | 8209 | Sven Erik Knop | Changed formatting to tabs. Made Python3 compatible. Fixed a small bug in integrate. |
| #1 | 7986 | Sven Erik Knop | Changed P4Transfer to PerforceTransfer to conform with the naming convention. |
//guest/sven_erik_knop/P4Pythonlib/scripts/P4Transfer.py

| # | Change | User | Description |
|---|--------|------|-------------|
| #10 | 7973 | Sven Erik Knop | Enable re-adding of files for 2010.2+ servers. The problem was that the server now adds integration records for re-added files, which made P4Transfer believe this was a dirty branch instead of an add. Now we check if the "how" is "add from", indicating a re-add. |
| #9 | 7971 | Sven Erik Knop | Updated P4Transfer to deal with merge w/ edit integrations. All types of integrations should now be supported. Also updated the documentation. |
| #8 | 7966 | Sven Erik Knop | Changed master and local to server1 and server2. Also added a first draft of documentation that should serve pretty much as the blog post I intend to write on this tool. |
| #7 | 7965 | Sven Erik Knop | Updated the shebang to avoid hardcoding the Python version. |
| #6 | 7964 | Sven Erik Knop | Changed type to kxtext by popular demand. |
| #5 | 7963 | Sven Erik Knop | Fixed the tamper problem. |
| #4 | 7962 | Sven Erik Knop | Updated P4Transfer with the ability to deal with +k types and merged files from integration. The result of the latter is an 'edit from' to avoid a tamper check problem. This is a hack for now until I can find a better way around it, but the repercussions should be low. |
| #3 | 7961 | Sven Erik Knop | Enable preview (-n) again. Not sure how it got lost. |
| #2 | 7960 | Sven Erik Knop | Updated copyright date and changed to ktext. |
| #1 | 7959 | Sven Erik Knop | P4Transfer release 1.0. Documentation to follow. |