<?php
/**
* Perforce Swarm
*
* @copyright 2015 Perforce Software. All rights reserved.
* @license Please see LICENSE.txt in top-level folder of this distribution.
* @version <release>/<patch>
*/
namespace CodeDeploy;
// load the AWS SDK and other dependencies via Composer's autoloader
require_once __DIR__ . '/vendor/autoload.php';
use Aws\Credentials\CredentialProvider;
use Aws\S3\S3Client;
use Aws\Exception\AwsException;
use Aws\S3\Exception\S3Exception;
use Reviews\Model\Review;
use P4\Spec\Change;
use Projects\Model\Project;
use Zend\Mvc\MvcEvent;
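/**
 * Queue integration that archives the code of projects impacted by a review
 * and uploads the resulting zips to a per-project S3 bucket.
 */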
class Module
{
/**
* Connect to queue event manager to handle changes.
*
* @param MvcEvent $event the bootstrap event
* @return void
*/
public function onBootstrap(MvcEvent $event)
{
$application = $event->getApplication();
$services = $application->getServiceManager();
$manager = $services->get('queue');
$events = $manager->getEventManager();
$logger = $services->get('logger');
$filters = $services->get('InputFilterManager');
$projectFilter = $filters->get('ProjectFilter');
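        // extend the project input filter with a 'codeDeploy' field so that
        // per-project settings (enabled flag and S3 bucket name) can be saved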
$projectFilter->add(
array(
'name' => 'codeDeploy',
'required' => false,
'filters' => array(
array(
'name' => 'Callback',
'options' => array(
'callback' => function ($value) {
$value = (array)$value + array(
'enabled' => false,
'bucket' => ''
);
return array(
'enabled' => (bool)$value['enabled'],
'bucket' => (string)$value['bucket']
);
}
)
)
)
)
);
$filters->setService('ProjectFilter', $projectFilter);
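        // load this module's client-side code (e.g. to render the CodeDeploy
        // fields on the project settings page)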
$helpers = $services->get('ViewHelperManager');
        $helpers->get('headScript')->prependFile('/module/CodeDeploy/js/CodeDeploy.js', 'text/javascript');
        // connect to review tasks so we can archive and upload affected projects
        // we do this late (low-priority) so all other handlers have
        // a chance to run first.
$events->attach(
'task.review',
function ($event) use ($services, $logger) {
$id = $event->getParam('id');
$p4 = $services->get('p4');
$logger->info("CodeDeploy: activity...");
$logger->info("CodeDeploy: id=$id");
try {
$change = Change::fetch($id, $p4);
$review = Review::fetch($change->getId(), $p4);
$headChange = $review->getHeadChange();
$logger->info("DEBUG: headChange: $headChange");
// prepare list of projects affected by the change
$impacted = Project::getAffectedByChange($change, $p4);
// exit early if no projects
if (!$impacted) {
return;
}
// fetch projects
$projects = Project::fetchAll(array(Project::FETCH_BY_IDS => array_keys($impacted)), $p4);
foreach ($projects as $projectId => $project) {
                        // get the project's CodeDeploy settings, applying defaults
                        // in case the project has never saved any
                        $codeDeploy = (array)$project->getRawValue('codeDeploy') + array(
                            'enabled' => false,
                            'bucket'  => ''
                        );
                        $enabled = (bool)$codeDeploy['enabled'];
                        $bucket  = (string)$codeDeploy['bucket'];
                        // skip projects that are disabled or have no S3 bucket configured
                        if (!$enabled || trim($bucket) === '') {
                            continue;
                        }
foreach ($project->getBranches() as $branch) {
foreach ($branch['paths'] as $path) {
$filespec = $path . '@' . $headChange;
                                $branchId = $branch['id'];
                                $logger->info("CodeDeploy: impacted: $projectId/$branchId ($filespec)");
                                $archiveFile = $this->archiveCode($filespec, $services);
                                $logger->info("DEBUG: archiveFile: " . var_export($archiveFile, true));
                                // skip this path if archiving was skipped or failed
                                if (!$archiveFile) {
                                    continue;
                                }
                                // sync code to the S3 bucket; the stream wrapper requires the s3:// scheme
                                $prefix = strpos($bucket, 's3://') === 0 ? '' : 's3://';
                                $s3path = "$prefix$bucket/$projectId.$branchId.$headChange.zip";
                                $this->syncBucket($s3path, $archiveFile, $services);
}
}
}
$logger->info("CodeDeploy: end.");
} catch (\Exception $e) {
$logger->err($e);
}
},
            // low priority so this runs after other event handlers
            -300
);
}
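    /**
     * Archive the files matching the given filespec into a zip under the cache dir.
     *
     * @param  string $filespec files to archive (path and revision specifier, e.g. path@change)
     * @param  \Zend\ServiceManager\ServiceLocatorInterface $services the service locator
     * @return string|null the path to the archive file, or null if archiving was skipped or failed
     */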
private function archiveCode($filespec, $services)
{
$cacheDir = DATA_PATH . '/cache/archives';
$config = $services->get('config');
$p4 = $services->get('p4');
$archiver = $services->get('archiver');
$logger = $services->get('logger');
// set protections on the archiver to filter out files user doesn't have access to
$archiver->setProtections($services->get('ip_protects'));
try {
// get files to compress
$filesInfo = $archiver->getFilesInfo($filespec, $p4);
$d = var_export($filesInfo, true);
$logger->info("DEBUG: filesInfo: $d");
// exit if files to compress are over the maximum size limit (if set)
            $maxSize = isset($config['archives']['max_input_size'])
                ? (int)$config['archives']['max_input_size']
                : 0;
            $logger->info("DEBUG: max_input_size: $maxSize");
if ($maxSize && $filesInfo['size'] > $maxSize) {
$logger->err("CodeDeploy: Archive too large (" . $filesInfo['size'] . " > $maxSize)");
return null;
}
// archive files matching filespec
\Record\Cache\Cache::ensureWritable($cacheDir);
$archiveFile = $cacheDir . '/' . $filesInfo['digest'] . '.zip';
$statusFile = $cacheDir . '/' . $filesInfo['digest'] . '.status';
$archiver->archive($filespec, $archiveFile, $statusFile, $p4);
// add a future task to remove archive file after its lifetime set in config (defaults to 1 day)
$cacheLifetime = isset($config['archives']['cache_lifetime'])
? $config['archives']['cache_lifetime']
: 60 * 60 * 24;
$services->get('queue')->addTask(
'cleanup.archive',
$archiveFile,
array('statusFile' => $statusFile),
time() + $cacheLifetime
);
$d = var_export($archiveFile, true);
$logger->info("DEBUG: archiveFile: $d");
return $archiveFile;
        } catch (\InvalidArgumentException $e) {
            $logger->err($e);
            return null;
        }
}
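    /**
     * Copy the given archive file into S3 using the AWS SDK's s3:// stream wrapper.
     *
     * @param  string $path        the destination (s3://bucket/key)
     * @param  string $archiveFile the local path of the zip archive to upload
     * @param  \Zend\ServiceManager\ServiceLocatorInterface $services the service locator
     * @return void
     */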
private function syncBucket($path, $archiveFile, $services)
{
$config = $services->get('config');
$logger = $services->get('logger');
$d = var_export($archiveFile, true);
$logger->info("DEBUG: archiveFile: $d");
try {
$logger->info("CodeDeploy: bucket=$path");
$s3 = new S3Client([
'region' => $config['aws']['region'],
'version' => $config['aws']['version'],
                // credentials could instead be read from the environment via CredentialProvider::env()
                'credentials' => [
                    'key'    => $config['aws']['aws_access_key_id'],
                    'secret' => $config['aws']['aws_secret_access_key'],
                ]
]);
// Register with stream wrapper for s3:// protocol
$s3->registerStreamWrapper();
            // copy archive to S3 storage; the stream wrapper reports most failures
            // as PHP warnings rather than exceptions, so check the return value
            if (!copy($archiveFile, $path)) {
                $logger->err("CodeDeploy: failed to copy $archiveFile to $path");
            }
        } catch (S3Exception $e) {
            // catch an S3-specific exception
            $logger->err("S3: " . $e->getMessage());
        } catch (AwsException $e) {
            // catch the more generic AwsException
            $logger->err("AWS ID: " . $e->getAwsRequestId());
            $logger->err("AWS Type: " . $e->getAwsErrorType());
            $logger->err("AWS Code: " . $e->getAwsErrorCode());
}
}
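    /**
     * Return this module's configuration.
     *
     * @return array the module configuration
     */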
public function getConfig()
{
return include __DIR__ . '/config/module.config.php';
}
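    /**
     * Return the autoloader configuration for this module.
     *
     * @return array the autoloader configuration
     */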
public function getAutoloaderConfig()
{
return array(
'Zend\Loader\StandardAutoloader' => array(
'namespaces' => array(
__NAMESPACE__ => __DIR__ . '/src/' . __NAMESPACE__,
),
),
);
}
}