#!/bin/bash
#
# This tool generates the required configuration files for Helix Threat Detection
# and optionally deploys the configuration.
#
# $Id: //guest/cmclouth/projects/htd-deployment/htd-source/runhtddeploy.sh#8 $
#

# runremotecommand $remoteserver $remotecmd $message
function runremotecommand()
{
    remoteserver=$1
    remotecmd=$2
    message=$3
    if [ -n "$message" ]; then
        echo "$message"
    fi
    ssh -t "$remoteserver" "$remotecmd"
}

# copyfileremote $localfile $remotefile $message
function copyfileremote()
{
    localfile=$1
    remotefile=$2
    message=$3
    if [ -n "$message" ]; then
        echo "$message"
    fi
    scp "$localfile" "$remotefile" > /dev/null
}

# copydirremote $localfile $remotefile $message
function copydirremote()
{
    localfile=$1
    remotefile=$2
    message=$3
    if [ -n "$message" ]; then
        echo "$message"
    fi
    scp -r "$localfile" "$remotefile" > /dev/null
}
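# Illustrative calls for the three helpers above (the hostname and paths in
# these examples are hypothetical, not part of this deployment):
#
#   runremotecommand "htd-data-01" "jps" "check java processes on htd-data-01"
#   copyfileremote "$htddeploy/analytics/conf/interset.conf" \
#       "htd-data-01:/opt/interset/analytics/conf/interset.conf" "deploy interset.conf"
#   copydirremote "$htddeploy/tools/" "htd-data-01:~" "deploy tools"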
function runonejps()
{
    remoteserver=$1
    runremotecommand "$remoteserver" jps "Check status of $remoteserver"
}

function runalljps()
{
    runonejps "$htdanalyticsmaster"
    for htdanalyticsdataslave in $(grep -v "^$" "$htdanalyticsdatafile" | grep -v "^#"); do
        runonejps "$htdanalyticsdataslave"
    done
    for htdinvestigator in $(grep -v "^$" "$htdinvestigatorfile" | grep -v "^#" | cut -sf1); do
        runonejps "$htdinvestigator"
    done
}

function runanalyticschema()
{
    remoteserver=$1
    runremotecommand "$remoteserver" "cd /opt/interset/analytics/bin; ./sql.sh --dbServer $remoteserver --action migrate" "begin analytics schema part 1 of 2"
    echo "completed analytics schema part 1 of 2"
    runremotecommand "$remoteserver" "cd /opt/interset/analytics/bin; ./sql.sh --dbServer $remoteserver --action migrate_aggregates" "begin analytics schema part 2 of 2"
    echo "completed analytics schema part 2 of 2"
}

function runformathdfs()
{
    remoteserver=$1
    # on all nodes remove prior data just in case
    for htdanalyticsdataslave in $(grep -v "^$" "$htdanalyticsdatafile" | grep -v "^#"); do
        runremotecommand "$htdanalyticsdataslave" "rm -Rf $intersetdatadir/hadoop/hdfs/dn/* $intersetdatadir/hadoop/hdfs/nn/* $intersetdatadir/hadoop/hdfs/snn/* $intersetdatadir/hbase/zookeeper/*" "$htdanalyticsdataslave clean hdfs"
    done
    runremotecommand "$remoteserver" "rm -Rf $intersetdatadir/hadoop/hdfs/dn/* $intersetdatadir/hadoop/hdfs/nn/* $intersetdatadir/hadoop/hdfs/snn/* $intersetdatadir/hbase/zookeeper/*" "$remoteserver clean hdfs"
    # format the namenode on the master
    runremotecommand "$remoteserver" "cd /opt/interset/hadoop/bin; ./hdfs namenode -format" "$remoteserver format hdfs"
}

function runstartanalytics()
{
    echo "Would you like to format Hadoop Distributed File System ($htdanalyticsmaster)?"
    select yn in "Yes" "No"; do
        case $yn in
            Yes ) runformathdfs "$htdanalyticsmaster"; break;;
            No ) break;;
        esac
    done
    echo "Would you like to remove previous logs ($htdanalyticsmaster)?"
    select yn in "Yes" "No"; do
        case $yn in
            Yes ) rm -f /opt/interset/hbase/logs/* /opt/interset/hadoop/logs/* /opt/interset/spark/logs/*; break;;
            No ) break;;
        esac
    done
    runremotecommand "$htdanalyticsmaster" "~/tools/startanalytics.sh" "analytics starting on all nodes"
    runalljps
    echo "Would you like to create analytic schema ($htdanalyticsmaster)?"
    select yn in "Yes" "No"; do
        case $yn in
            Yes ) runanalyticschema "$htdanalyticsmaster"; break;;
            No ) break;;
        esac
    done
}

function runstartinvestigators()
{
    for htdinvestigator in $(grep -v "^$" "$htdinvestigatorfile" | grep -v "^#" | cut -sf1); do
        echo "Would you like to initialize database ($htdinvestigator)?"
        select yn in "Yes" "No"; do
            case $yn in
                Yes ) runremotecommand "$htdinvestigator" "cd /opt/interset/reporting; java -jar $htdinvestigatorjar db migrate investigator.yml" "Initializing database on $htdinvestigator"; break;;
                No ) break;;
            esac
        done
        runremotecommand "$htdinvestigator" "sudo update-rc.d nginx enable; sudo update-rc.d reporting enable" "Add nginx and reporting to startup on $htdinvestigator"
        runremotecommand "$htdinvestigator" "sudo service nginx start; sudo service reporting start" "Start nginx and reporting on $htdinvestigator"
    done
}
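# Input file formats assumed by the grep/cut pipelines in this script (blank
# lines and lines starting with "#" are ignored everywhere; the hostnames
# below are hypothetical):
#
#   $htdanalyticsdatafile - one analytics data-node hostname per line:
#       htd-data-01
#       htd-data-02
#
#   $htdinvestigatorfile - hostname and FQDN separated by a TAB on each line
#   ("cut -s" only emits lines that actually contain the delimiter):
#       htd-inv-01<TAB>htd-inv-01.example.com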
function rundeployanalytics()
{
    remoteserver=$1
    # copy files from htddeploy to target server
    copyfileremote "$htddeploy/hadoop/etc/hadoop/slaves" "$remoteserver:/opt/interset/hadoop/etc/hadoop/slaves" "deploy $remoteserver:/opt/interset/hadoop/etc/hadoop/slaves"
    copyfileremote "$htddeploy/hadoop/etc/hadoop/core-site.xml" "$remoteserver:/opt/interset/hadoop/etc/hadoop/core-site.xml" "deploy $remoteserver:/opt/interset/hadoop/etc/hadoop/core-site.xml"
    copyfileremote "$htddeploy/hbase/conf/regionservers" "$remoteserver:/opt/interset/hbase/conf/regionservers" "deploy $remoteserver:/opt/interset/hbase/conf/regionservers"
    copyfileremote "$htddeploy/hbase/conf/hbase-site.xml" "$remoteserver:/opt/interset/hbase/conf/hbase-site.xml" "deploy $remoteserver:/opt/interset/hbase/conf/hbase-site.xml"
    copyfileremote "$htddeploy/spark/conf/slaves" "$remoteserver:/opt/interset/spark/conf/slaves" "deploy $remoteserver:/opt/interset/spark/conf/slaves"
    copyfileremote "$htddeploy/analytics/conf/interset.conf" "$remoteserver:/opt/interset/analytics/conf/interset.conf" "deploy $remoteserver:/opt/interset/analytics/conf/interset.conf"
    # debugging
    # runremotecommand "$remoteserver" 'grep -ir "htd" /opt/interset/hadoop/etc/ /opt/interset/hbase/conf/ /opt/interset/spark/conf/ /opt/interset/analytics/conf/ /opt/interset/reporting/investigator.yml' ""
}

function rundeploymaster()
{
    # confirm ssh to all slaves
    for htdanalyticsdataslave in $(grep -v "^$" "$htdanalyticsdatafile" | grep -v "^#"); do
        # runremotecommand "$htdanalyticsmaster" "ssh-copy-id $htdanalyticsdataslave" "copy ssh key from $htdanalyticsmaster to $htdanalyticsdataslave"
        runremotecommand "$htdanalyticsmaster" "ssh $htdanalyticsdataslave hostname" "testing ssh from $htdanalyticsmaster to $htdanalyticsdataslave"
    done
    # confirm ssh to self
    runremotecommand "$htdanalyticsmaster" "ssh $htdanalyticsmaster hostname" "testing ssh from $htdanalyticsmaster to $htdanalyticsmaster"
    # confirm ssh to localhost
    # runremotecommand "$htdanalyticsmaster" "ssh-copy-id localhost" "copy ssh key from $htdanalyticsmaster to localhost"
    runremotecommand "$htdanalyticsmaster" "ssh localhost hostname" "testing ssh from $htdanalyticsmaster to localhost"
    runremotecommand "$htdanalyticsmaster" "mkdir -p ~/tools; rm -Rf ~/tools/*" "create tools folder on $htdanalyticsmaster"
    copydirremote "$htddeploy/tools/" "$htdanalyticsmaster:~" "deploy $htdanalyticsmaster:~/tools/"
    rundeployanalytics "$htdanalyticsmaster"
}

function rundeploydata()
{
    for htdanalyticsdataslave in $(grep -v "^$" "$htdanalyticsdatafile" | grep -v "^#"); do
        rundeployanalytics "$htdanalyticsdataslave"
        runremotecommand "$htdanalyticsdataslave" "mkdir -p ~/sockets; chmod 700 ~/sockets/" ""
    done
}

function rundeployinvestigator()
{
    remoteserver=$1
    # copy files from htddeploy to target server
    copyfileremote "$htddeploy/reporting/$remoteserver/investigator.yml" "$remoteserver:/opt/interset/reporting/investigator.yml" "deploy $remoteserver:/opt/interset/reporting/investigator.yml"
    # copy configuration to investigator just in case it is needed as a working node
    runremotecommand "$remoteserver" "mkdir -p ~/tools; rm -Rf ~/tools/*" "create tools folder on $remoteserver"
    copydirremote "$htddeploy/tools/" "$remoteserver:~" "deploy $remoteserver:~/tools/"
    rundeployanalytics "$remoteserver"
    # runremotecommand "$remoteserver" 'grep -ir "htd" /opt/interset/reporting/investigator.yml' ""
}

function rundeployinvestigators()
{
    for htdinvestigator in $(grep -v "^$" "$htdinvestigatorfile" | grep -v "^#" | cut -sf1); do
        runremotecommand "$htdanalyticsmaster" "ssh $htdinvestigator hostname" "testing ssh from $htdanalyticsmaster to $htdinvestigator"
        rundeployinvestigator "$htdinvestigator"
    done
}

# generate configuration files
function genconfig()
{
    # clean htddeploy (the glob must sit outside the quotes to expand)
    rm -rf "$htddeploy"/*
    mkdir -p "$htddeploy/tools"
    cp -ar "$htdsource/tools/" "$htddeploy/"

    # configure hadoop
    mkdir -p "$htddeploy/hadoop/etc/hadoop"
    # hadoop/etc/hadoop/slaves
    grep -v "^$" "$htdanalyticsdatafile" | grep -v "^#" > "$htddeploy/hadoop/etc/hadoop/slaves"
    # hadoop/etc/hadoop/core-site.xml
    sed 's/\$htd-analytics-master\$/'"$htdanalyticsmaster"'/' "$htdsource/hadoop/etc/hadoop/core-site.xml" > "$htddeploy/hadoop/etc/hadoop/core-site.xml"

    # configure hbase
    mkdir -p "$htddeploy/hbase/conf"
    # hbase/conf/regionservers
    grep -v "^$" "$htdanalyticsdatafile" | grep -v "^#" > "$htddeploy/hbase/conf/regionservers"
    # hbase/conf/hbase-site.xml
    sed 's/\$htd-analytics-master\$/'"$htdanalyticsmaster"'/' "$htdsource/hbase/conf/hbase-site.xml" > "$htddeploy/hbase/conf/hbase-site.xml"

    # configure spark
    mkdir -p "$htddeploy/spark/conf"
    # spark/conf/slaves
    grep -v "^$" "$htdanalyticsdatafile" | grep -v "^#" > "$htddeploy/spark/conf/slaves"

    # configure analytics
    mkdir -p "$htddeploy/analytics/conf"
    # analytics/conf/interset.conf
    sed 's/\$htd-investigators\$/'"$htdinvestigators"'/' "$htdsource/analytics/conf/interset.conf" | sed 's/\$htd-analytics-master\$/'"$htdanalyticsmaster"'/' > "$htddeploy/analytics/conf/interset.conf"

    # configure reporting: one investigator.yml per investigator host; fields
    # alternate hostname/FQDN as the word-split loop walks the cut output
    mkdir -p "$htddeploy/reporting"
    testcounter=0
    for field in $(grep -v "^$" "$htdinvestigatorfile" | grep -v "^#" | cut -s -f1-2); do
        ((testcounter += 1))
        if [ $((testcounter % 2)) -gt 0 ]; then
            htdinvestigator=$field
        else
            htdinvestigatorfqdn=$field
            mkdir -p "$htddeploy/reporting/$htdinvestigator"
            sed 's/\$htd-analytics-master\$/'"$htdanalyticsmaster"'/' "$htdsource/reporting/investigator.yml" | sed 's/\$htd-investigator-fqdn\$/'"$htdinvestigatorfqdn"'/' > "$htddeploy/reporting/$htdinvestigator/investigator.yml"
        fi
    done

    # debugging
    # grep -ir "htd" "$htddeploy/hadoop/" "$htddeploy/hbase/" "$htddeploy/spark/" "$htddeploy/analytics/" "$htddeploy/reporting/"
    echo "configuration files created in $htddeploy"
}
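# Illustrative htddeploy.conf consumed by the main section below (the keys
# match the grep calls; all values here are hypothetical):
#
#   htdanalyticsmaster=htd-master-01
#   htdanalyticsdatafile=/home/interset/htd-source/htdanalyticsdata.txt
#   htdinvestigatorfile=/home/interset/htd-source/htdinvestigators.txt
#   htddeploy=/home/interset/htddeploy
#   htdsource=/home/interset/htd-source
#   htdinvestigatorjar=investigator-all.jar
#   intersetdatadir=/opt/interset/data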
# Exit immediately on non-zero status.
set -e

if [ $# -eq 0 ]; then
    echo "Usage: ./htd-configure.sh </Path/to/conf_file>"
    echo "Example: ./htd-configure.sh /home/interset/htd-source/htddeploy.conf"
    exit 1
fi
HTD_CONFIGURE_CONF=$1

# Source htdanalyticsmaster, htdanalyticsdatafile, htdinvestigatorfile,
# htddeploy, htdsource, htdinvestigatorjar and intersetdatadir from the conf file.
export $(grep -i htdanalyticsmaster "$HTD_CONFIGURE_CONF" | grep -v '#' | tr -d '[:blank:]')
export $(grep -i htdanalyticsdatafile "$HTD_CONFIGURE_CONF" | grep -v '#' | tr -d '[:blank:]')
export $(grep -i htdinvestigatorfile "$HTD_CONFIGURE_CONF" | grep -v '#' | tr -d '[:blank:]')
export $(grep -i htddeploy "$HTD_CONFIGURE_CONF" | grep -v '#' | tr -d '[:blank:]')
export $(grep -i htdsource "$HTD_CONFIGURE_CONF" | grep -v '#' | tr -d '[:blank:]')
export $(grep -i htdinvestigatorjar "$HTD_CONFIGURE_CONF" | grep -v '#' | tr -d '[:blank:]')
export $(grep -i intersetdatadir "$HTD_CONFIGURE_CONF" | grep -v '#' | tr -d '[:blank:]')

# Make sure configuration is set
if [ -z "$htdanalyticsmaster" ]; then
    echo "htdanalyticsmaster not set!"
    exit 1
fi
if [ -z "$htdanalyticsdatafile" ]; then
    echo "htdanalyticsdatafile not set!"
    exit 1
fi
if [ -z "$htdinvestigatorfile" ]; then
    echo "htdinvestigatorfile not set!"
    exit 1
fi
if [ -z "$htddeploy" ]; then
    echo "htddeploy not set!"
    exit 1
fi
if [ -z "$htdinvestigatorjar" ]; then
    echo "htdinvestigatorjar not set!"
    exit 1
fi

testcounter=$(grep -v "^$" "$htdanalyticsdatafile" | grep -v "^#" | wc -l)
if [ "$testcounter" -lt 1 ]; then
    echo "no analytics data slaves found in $htdanalyticsdatafile"
    exit 1
fi
# build comma-separated list of data slaves
for htdanalyticsdataslave in $(grep -v "^$" "$htdanalyticsdatafile" | grep -v "^#"); do
    if [ -z "$htdanalyticsdataslaves" ]; then
        htdanalyticsdataslaves=$htdanalyticsdataslave
    else
        htdanalyticsdataslaves=$htdanalyticsdataslaves,$htdanalyticsdataslave
    fi
done

testcounter=$(grep -v "^$" "$htdinvestigatorfile" | grep -v "^#" | wc -l)
if [ "$testcounter" -lt 1 ]; then
    echo "no investigators found in $htdinvestigatorfile"
    exit 1
fi
# build comma-separated list of investigators
for htdinvestigator in $(grep -v "^$" "$htdinvestigatorfile" | grep -v "^#" | cut -sf1); do
    if [ -z "$htdinvestigators" ]; then
        htdinvestigators=$htdinvestigator
    else
        htdinvestigators=$htdinvestigators,$htdinvestigator
    fi
done

# show effective settings
echo "htdanalyticsmaster=$htdanalyticsmaster"
echo "htdanalyticsdatafile=$htdanalyticsdatafile"
echo "htdinvestigatorfile=$htdinvestigatorfile"
echo "htddeploy=$htddeploy"
echo "htdsource=$htdsource"
echo "htdanalyticsdataslaves=$htdanalyticsdataslaves"
echo "htdinvestigators=$htdinvestigators"
echo "htdinvestigatorjar=$htdinvestigatorjar"
echo "intersetdatadir=$intersetdatadir"

# menu
action="nothing"
until [ "$action" = "quit" ]; do
    echo "Select option to perform"
    select action in "GenerateConfigFiles" "DeployAnalytics" "DeployInvestigators" "StartAnalytics" "StartInvestigators" "quit"; do
        case $action in
            GenerateConfigFiles ) genconfig; break;;
            DeployAnalytics ) rundeploymaster; rundeploydata; break;;
            DeployInvestigators ) rundeployinvestigators; break;;
            StartAnalytics ) runstartanalytics; break;;
            StartInvestigators ) runstartinvestigators; break;;
            quit ) break;;
        esac
    done
done
exit 0
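# Typical end-to-end flow (one reading of the menu above; the script does not
# enforce this order):
#   1. GenerateConfigFiles - render configuration files into $htddeploy
#   2. DeployAnalytics     - push configs and tools to the master and data nodes
#   3. DeployInvestigators - push investigator.yml and tools to investigator nodes
#   4. StartAnalytics      - optionally format HDFS, start services, create the schema
#   5. StartInvestigators  - migrate the reporting DB, enable and start nginx/reporting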