These scripts had some checks on actions, but not on all of them. I want anything that fails during an update or a push to stop the whole machinery right there, because we cannot afford to miss merge screw-ups.
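For example, the intended shape (sketched from the hunks below) is that every consequential command is followed immediately by the new check:

    git fetch origin
    test_or_die "running git fetch origin"
    git push downstream master
    test_or_die "running the git push downstream"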
#
# Author: Chris Koeritz
-# tests whether the last action worked or not, and if not, it issues the
-# complaint passed as the arguments.
-function check_if_failed()
-{
- if [ $? -ne 0 ]; then
- echo "Step FAILed: $*"
- return 1
- fi
-}
-
-# uses the failure checking function, but actually exits out of the script
-# if there was a failure detected.
-function exit_if_failed()
-{
- check_if_failed $*
- if [ $? -ne 0 ]; then
- exit 1
- fi
-}
-
# given a source and target folder, this synchronizes the source into the target.
function synch_to_backup()
{
exit 1
fi
echo "Synchronizing $source into $dest."
-####hmmm: temporary measure until top-level dir bug fixed in synch_files app.
-### if [ ! -d "$dest" ]; then
-### mkdir -p "$dest"
-### if [ $? -ne 0 ]; then
-### echo "FAILed to make target directory: $dest"
-### return 1
-### fi
-### fi
synch_files "$source" "$dest"
- check_if_failed "synching $source to $dest"
+ test_or_continue "synching $source to $dest"
}
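# usage sketch (target paths guessed from the backup presence checks below):
#   synch_to_backup /etc /z/backup/etc
#   synch_to_backup /home /z/backup/home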
##############
# now saddle up the backup.
#NO LONGER USING MOUNT: mount /z/backup/
-#NO LONGER USING MOUNT: exit_if_failed "mounting backup folder"
+#NO LONGER USING MOUNT: test_or_die "mounting backup folder"
# we should always be synching to an existing set in there. make sure they exist.
# for the first ever backup, this is not a good check...
#test -d /z/backup/etc -a -d /z/backup/home
-#exit_if_failed "testing presence of prior backup"
+#test_or_die "testing presence of prior backup"
##############
##############
#NO LONGER USING MOUNT: umount /z/backup/
-#NO LONGER USING MOUNT: exit_if_failed "unmounting backup folder"
+#NO LONGER USING MOUNT: test_or_die "unmounting backup folder"
echo making a simple starting log file for container...
if [ ! -d "$GFFS_LOGS" ]; then
mkdir -p "$GFFS_LOGS"
- test_or_fail Making GFFS logs directory.
+ test_or_die "Making GFFS logs directory."
fi
echo building the code freshly, although not with a clean first...
build_gffs
-test_or_fail Building GFFS source code.
+test_or_die "Building GFFS source code."
echo starting container now and spooling its log file...
(bash $GFFS_TOOLKIT_ROOT/library/maybe_restart_container.sh &>$TMP/main_container_restarting.log & )
# checks the result of the last command that was run, and if that failed,
# then this complains and exits from bash. the function parameters are
# used as the message to print as a complaint.
- function test_or_fail()
+ function test_or_die()
{
if [ $? -ne 0 ]; then
echo -e "\n\naction failed: $*\n\nExiting script..."
exit 1
fi
}
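# usage sketch: the check must come immediately after the command whose
# exit status it inspects, since it reads "$?". for example (drawn from
# hunks elsewhere in this change):
#   git fetch origin
#   test_or_die "running git fetch origin"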
- # like test_or_fail, but will keep going after complaining.
+ # like test_or_die, but will keep going after complaining.
function test_or_continue()
{
if [ $? -ne 0 ]; then
if [ -d "$src" ]; then
ln -s "$src" "$target"
- test_or_fail "Creating symlink from '$src' to '$target'"
+ test_or_die "Creating symlink from '$src' to '$target'"
fi
echo "Created symlink from '$src' to '$target'."
}
echo running tests on set_var_if_undefined.
flagrant=petunia
set_var_if_undefined flagrant forknordle
- test_or_fail "testing if defined variable would be whacked"
+ test_or_die "testing if defined variable would be whacked"
if [ $flagrant != petunia ]; then
echo set_var_if_undefined failed to leave the test variable alone
exit 1
# copy up the archived bluray discs, and possibly future archived formats.
netcp /z/archons/* $BARKY/bkup_archons/
-test_or_fail "synching archons"
+test_or_die "synching archons"
# copy over our somewhat attenuated but still important walrus archives.
netcp /z/walrus/* $BARKY/walrus/
-test_or_fail "synching walrus"
+test_or_die "synching walrus"
# copy all the music files for future reference.
netcp /z/musix/* $BARKY/musix/
-test_or_fail "synching musix"
+test_or_die "synching musix"
# back up the photo archives.
netcp /z/imaginations/* $BARKY/imaginations/
-test_or_fail "synching imaginations"
+test_or_die "synching imaginations"
source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
cp $HOME/.thunderbird/oqv30zg4.default/ImapMail/zooty.koeritz.com/msgFilterRules.dat $CLOUD_BASE/magic_cabinet/mail_filters/zooty_serene_hamstertronic_$(date_stringer).filters
-test_or_fail "copying feistymeow.org filters"
+test_or_die "copying feistymeow.org filters"
cp $HOME/.thunderbird/oqv30zg4.default/ImapMail/mail.eservices.virginia.edu/msgFilterRules.dat $CLOUD_BASE/magic_cabinet/mail_filters/uva_email_$(date_stringer).filters
-test_or_fail "copying UVa filters"
+test_or_die "copying UVa filters"
if [ ! -d "$SPOOLING_OUTPUT_DIR" ]; then
mkdir -p "$SPOOLING_OUTPUT_DIR"
- test_or_fail Creating spooling output directory.
+ test_or_die "Creating spooling output directory."
fi
# copies the files for a particular game out to a spooling folder.
echo "committing repositories at: $(date)"
echo
-FULL_LIST="$(dirname $FEISTY_MEOW_APEX) $HOME"
-#if [ "$OS" != "Windows_NT" ]; then
-# # first get individual folders.
-# checkin_list $(dirname $FEISTY_MEOW_APEX) $HOME
-#else (end old)
+FULL_LIST=" $(dirname $FEISTY_MEOW_APEX) $HOME "
if [ "$OS" == "Windows_NT" ]; then
- FULL_LIST+=" c:/ d:/ e:/"
+ FULL_LIST+=" c:/ d:/ e:/ "
fi
checkin_list $FULL_LIST
+test_or_die "checking in list: $FULL_LIST"
##############
#!/bin/bash
+# verifies and compresses the git repository in the folder specified.
+
prune_dir="$1"
if [ -z "$prune_dir" ]; then
prune_dir="$(pwd)"
fi
pushd "$prune_dir"
-echo cleaning git in directory $(pwd)
+echo "cleaning git repo in directory $(pwd)"
git fsck --full
-check_if_failed "git fsck"
+test_or_die "git fsck"
git gc --prune=today --aggressive
-check_if_failed "git gc"
+test_or_die "git gc"
git repack
-check_if_failed "git repack"
+test_or_die "git repack"
popd
#!/bin/bash
+
# does differences on a set of folders checked out from subversion or git.
# this can take a directory as parameter, but will default to the current
# working directory. all the directories under the passed directory will
# be examined.
+source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
+source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"
+
+##############
+
dir="$1"; shift
if [ -z "$dir" ]; then
dir=.
fi
-source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
-source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"
-
pushd "$dir" &>/dev/null
+test_or_die "changing to directory: $dir"
tempfile=$(generate_rev_ctrl_filelist)
popd &>/dev/null
perform_revctrl_action_on_file "$tempfile" do_diff
+test_or_die "performing revision control action do_diff on: $tempfile"
source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"
+##############
+
# trickery to ensure we can always update this file, even when the operating system has some
# rude behavior with regard to file locking (ahem, windows...).
# and even more rudeness is that the pwd and $TMP may not always be in the same form,
fi
new_name="$TMP/zz_$(basename $0)"
\cp -f "$0" "$new_name"
- if [ $? -ne 0 ]; then
- echo "failed to copy this script up to the TMP directory. exploit attempted?"
- exit 1
- fi
+ test_or_die "failed to copy this script up to the TMP directory. exploit attempted?"
cd "$TMP"
chmod a+x "$new_name"
+ test_or_die "chmodding of file: $new_name"
exec "$new_name"
+ test_or_die "execing cloned getemscript"
fi
##############
export TMPO_CHK=$TMP/zz_chk.log
rm -f "$TMPO_CHK"
+test_or_die "removing file: $TMPO_CHK"
echo "getting repositories at: $(date)"
echo
# perform the checkouts as appropriate per OS.
FULL_LIST="$(dirname $FEISTY_MEOW_APEX) $HOME"
-#if [ "$OS" != "Windows_NT" ]; then
-# checkout_list $HOME /usr/local 2>&1 | tee -a "$TMPO_CHK"
-#else
if [ "$OS" == "Windows_NT" ]; then
FULL_LIST+="c:/ d:/ e:/"
fi
checkout_list $FULL_LIST 2>&1 | tee -a "$TMPO_CHK"
+test_or_die "checking out list: $FULL_LIST"
##############
# get everything from the origin.
git fetch origin
-
-# get everything from the origin.
-#needed? git pull
+test_or_die "running git fetch origin"
# turn off occasionally troublesome setting before checkin.
unset GIT_SSH
# send the little boat down the stream to the dependent repository.
git push downstream master
+test_or_die "running the git push downstream"
popd
# current working directory. all the directories under the passed directory
# will be examined.
+source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
+source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"
+
+##############
+
dir="$1"; shift
if [ -z "$dir" ]; then
dir=.
fi
-source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
-source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"
pushd "$dir" &>/dev/null
+test_or_die "changing to directory: $dir"
tempfile=$(generate_rev_ctrl_filelist)
popd &>/dev/null
perform_revctrl_action_on_file "$tempfile" do_checkin
+test_or_die "doing a check-in on: $tempfile"
# a directory as a parameter, but will default to the current directory.
# all the directories under the passed directory will be examined.
+source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
+source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"
+
+##############
+
dir="$1"; shift
if [ -z "$dir" ]; then
dir=.
fi
-source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
-source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"
-
pushd "$dir" &>/dev/null
+test_or_die "changing directory to: $dir"
tempfile=$(generate_rev_ctrl_filelist)
popd &>/dev/null
perform_revctrl_action_on_file "$tempfile" do_report_new
+test_or_die "running revision control report"
filename="$1"; shift
svn resolve --accept=working "$filename"
+test_or_die "resolving tree conflict by accepting the working directory as the right one"
# working directory. all the directories under the passed directory will
# be examined.
+source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
+source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"
+
+##############
+
dir="$1"; shift
if [ -z "$dir" ]; then
dir=.
fi
-source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
-source "$FEISTY_MEOW_SCRIPTS/rev_control/version_control.sh"
-
pushd "$dir" &>/dev/null
+test_or_die "changing to directory: $dir"
tempfile=$(generate_rev_ctrl_filelist)
popd &>/dev/null
perform_revctrl_action_on_file "$tempfile" do_update
+test_or_die "running revision control update"
source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
source "$FEISTY_MEOW_SCRIPTS/tty/terminal_titler.sh"
+##############
+
# the maximum depth that the recursive functions will try to go below the starting directory.
export MAX_DEPTH=5
# this function sets a variable called "home_system" to "true" if the
# machine is considered one of fred's home machines. if you are not
# fred, you may want to change the machine choices.
-export home_system=
function is_home_system()
{
# load up the name of the host.
get_our_hostname
# reset the variable that we'll be setting.
- home_system=
+ unset home_system
+ export home_system
if [[ $this_host == *.gruntose.blurgh ]]; then
home_system=true
fi
is_home_system
# special override to pick local servers when at home.
if [ "$home_system" == "true" ]; then
-#what was this section for again?
+#hmmm: this "home system" feature seems to be unnecessary?
if [ "$in_or_out" == "out" ]; then
# need the right home machine for modifier when checking out.
-#huhhh? modifier="svn://shaggy/"
modifier=
else
# no modifier for checkin.
fi
}
+##############
+
# selects the method for check-in based on where we are.
function do_checkin()
{
fi
local blatt="echo checking in '$nicedir'..."
+ local retval=0 # normally successful.
+
do_update "$directory"
- if [ $? -ne 0 ]; then
- echo "repository update failed; this should be fixed before check-in."
- return 1
- fi
+ test_or_die "repository update failed; this should be fixed before check-in."
+
pushd "$directory" &>/dev/null
- local retval=0 # normally successful.
if [ -f ".no-checkin" ]; then
echo "skipping check-in due to presence of .no-checkin sentinel file."
elif [ -d "CVS" ]; then
# only diff if we see a repository living there.
if [ -d ".svn" ]; then
svn diff .
+ retval+=$?
elif [ -d ".git" ]; then
git diff
+ retval+=$?
elif [ -d "CVS" ]; then
cvs diff .
+ retval+=$?
fi
popd &>/dev/null
# yep, this path is absolute. just handle it directly.
if [ ! -d "$outer" ]; then continue; fi
do_checkin $outer
+ test_or_die "running check-in on: $outer"
sep 28
else
for inner in $list; do
local path="$inner/$outer"
if [ ! -d "$path" ]; then continue; fi
do_checkin $path
+ test_or_die "running check-in on: $path"
sep 28
done
fi
# yep, this path is absolute. just handle it directly.
if [ ! -d "$outer" ]; then continue; fi
do_update $outer
+ test_or_die "running update on: $path"
sep 28
else
for inner in $list; do
local path="$inner/$outer"
if [ ! -d "$path" ]; then continue; fi
do_update $path
+ test_or_die "running update on: $path"
sep 28
done
fi
# on each directory name, it performs the action (second parameter) provided.
function perform_revctrl_action_on_file()
{
-
-#hmmm: this doesn't capture any error returns!
-
local tempfile="$1"; shift
local action="$1"; shift
pushd "$dirname" &>/dev/null
echo "[$(pwd)]"
$action .
+ test_or_die "performing action $action on: $(pwd)"
sep 28
popd &>/dev/null
done 3<"$tempfile"
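# usage sketch (the calling pattern used by the scripts in this change):
#   tempfile=$(generate_rev_ctrl_filelist)
#   perform_revctrl_action_on_file "$tempfile" do_update
#   test_or_die "running revision control update"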
# simplistic approach here; just go to the folder and pull the changes.
pushd "$full_app_dir" &>/dev/null
-test_or_fail "Changing to app path '$full_app_dir'"
+test_or_die "Changing to app path '$full_app_dir'"
dir="avenger5/vendor/siteavenger/avcore"
if [ ! -d $dir ]; then
pushd "$dir" &>/dev/null
git pull
- test_or_fail "Pulling git repo for avcore under '$full_app_dir'"
+ test_or_die "Pulling git repo for avcore under '$full_app_dir'"
echo "Finished updating the avcore portion of site in ${app_dirname}."
# this should set the site_store_path variable if everything goes well.
update_repo "$full_app_dir" "$CHECKOUT_DIR_NAME" "$DEFAULT_REPOSITORY_ROOT" "$repo_name"
-test_or_fail "Updating the repository storage directory"
+test_or_die "Updating the repository storage directory"
# update the site to load dependencies.
sep
composer_repuff "$site_store_path"
-test_or_fail "Installing site dependencies with composer"
+test_or_die "Installing site dependencies with composer"
# set up the symbolic links needed to achieve siteliness.
sep
if [ ! -d "$appdir" ]; then
echo "Creating the apps directory: $appdir"
mkdir "$appdir"
- test_or_fail "Making apps directory when not already present"
+ test_or_die "Making apps directory when not already present"
fi
}
exit 1
elif [ $numdirs -eq 1 ]; then
app_dirname="$(basename $(find "$appsdir" -mindepth 1 -maxdepth 1 -type d) )"
- test_or_fail "Guessing application folder"
+ test_or_die "Guessing application folder"
else
# if more than one folder, force user to choose.
# Reference: https://askubuntu.com/questions/1705/how-can-i-create-a-select-menu-in-a-shell-script
PS3="$holdps3"
fi
test_app_folder "$appsdir" "$app_dirname"
- test_or_fail "Testing application folder: $app_dirname"
+ test_or_die "Testing application folder: $app_dirname"
echo "Application folder is: $app_dirname"
}
if [ ! -d "$combo" ]; then
echo "Creating app directory: $combo"
mkdir "$combo"
- test_or_fail "Making application directory when not already present"
+ test_or_die "Making application directory when not already present"
fi
}
if [ -f "$site_dir/bin/cake" ]; then
chmod -R a+rx "$site_dir/bin/cake"
- test_or_fail "Enabling execute bit on cake binary"
+ test_or_die "Enabling execute bit on cake binary"
fi
if [ -d "$site_dir/logs" ]; then
chmod -R g+w "$site_dir/logs"
- test_or_fail "Enabling group write on site's Logs directory"
+ test_or_die "Enabling group write on site's Logs directory"
fi
if [ -d "$site_dir/tmp" ]; then
chmod -R g+w "$site_dir/tmp"
- test_or_fail "Enabling group write on site's tmp directory"
+ test_or_die "Enabling group write on site's tmp directory"
fi
}
if [ -f "$site_dir/bin/cake" ]; then
# flush any cached objects from db.
"$site_dir/bin/cake" orm_cache clear
- test_or_fail "Clearing ORM cache"
+ test_or_die "Clearing ORM cache"
fi
}
unset site_store_path
pushd "$full_app_dir" &>/dev/null
- test_or_fail "Switching to our app dir '$full_app_dir'"
+ test_or_die "Switching to our app dir '$full_app_dir'"
local complete_path="$full_app_dir/$checkout_dirname"
if [ -d "$checkout_dirname" ]; then
# checkout directory exists, so let's check it.
pushd "$checkout_dirname" &>/dev/null
- test_or_fail "Switching to our checkout directory: $checkout_dirname"
+ test_or_die "Switching to our checkout directory: $checkout_dirname"
# ask for repository name (without .git).
if git rev-parse --git-dir > /dev/null 2>&1; then
# a repository was found, so update the version here and leave.
echo "Repository $repo_name exists. Updating it."
rgetem
- test_or_fail "Recursive checkout on: $complete_path"
+ test_or_die "Recursive checkout on: $complete_path"
else
# clone the repo since it wasn't found.
echo "Cloning repository $repo_name now."
git clone "$repo_root/$repo_name.git" $checkout_dirname
- test_or_fail "Git clone of repository: $repo_name"
+ test_or_die "Git clone of repository: $repo_name"
fi
fix_site_perms "$complete_path"
local site_store_path="$1"; shift
pushd "$site_store_path" &>/dev/null
- test_or_fail "Switching to our app dir '$site_store_path'"
+ test_or_die "Switching to our app dir '$site_store_path'"
echo "Updating site with composer..."
composer -n install
- test_or_fail "Composer installation step on '$site_store_path'."
+ test_or_die "Composer installation step on '$site_store_path'."
echo "Site updated."
#hmmm: argh global
# jump into the site path so we can start making relative links.
pushd "$site_store_path" &>/dev/null
- test_or_fail "Switching to our app dir '$site_store_path'"
+ test_or_die "Switching to our app dir '$site_store_path'"
pushd webroot &>/dev/null
# remove all symlinks that might plague us.
find . -maxdepth 1 -type l -exec rm -f {} ';'
- test_or_fail "Cleaning out links in webroot"
+ test_or_die "Cleaning out links in webroot"
# link in the avcore plugin.
make_safe_link "../vendor/siteavenger/avcore/webroot" avcore
if [ -L public ]; then
# public is a symlink.
\rm public
- test_or_fail "Removing public directory symlink"
+ test_or_die "Removing public directory symlink"
elif [ -d public ]; then
# public is a folder with default files.
#hmmm: is that safe?
\rm -rf public
- test_or_fail "Removing public directory and contents"
+ test_or_die "Removing public directory and contents"
fi
# create the main 'public' symlink
#hmmm: argh global
make_safe_link $CHECKOUT_DIR_NAME/webroot public
- test_or_fail "Creating link to webroot called 'public'"
+ test_or_die "Creating link to webroot called 'public'"
#hmmm: public/$themelower/im will be created automatically by system user with appropriate permissions
# this should set the site_store_path variable if everything goes well.
update_repo "$full_app_dir" "$CHECKOUT_DIR_NAME" "$DEFAULT_REPOSITORY_ROOT" "$repo_name"
-test_or_fail "Updating the repository storage directory"
+test_or_die "Updating the repository storage directory"
sep
# this should set the site_store_path variable if everything goes well.
update_repo "$full_app_dir" "$CHECKOUT_DIR_NAME" "$DEFAULT_REPOSITORY_ROOT" "$repo_name"
-test_or_fail "Updating the repository storage directory"
+test_or_die "Updating the repository storage directory"
####
local full_path="$BASE_PATH/$our_app"
if [ ! -d "$full_path" ]; then
mkdir -p $full_path
- test_or_fail "The app storage path could not be created.\n Path in question is: $full_path"
+ test_or_die "The app storage path could not be created.\n Path in question is: $full_path"
fi
# now give the web server some access to the folder. this is crucial since the folders
while [[ $chow_path != $HOME ]]; do
echo chow path is now $chow_path
chmod g+rx "$chow_path"
- test_or_fail "Failed to add group permissions on the path: $chow_path"
+ test_or_die "Failed to add group permissions on the path: $chow_path"
# reassert the user's ownership of any directories we might have just created.
chown $(logname) "$chow_path"
- test_or_fail "changing ownership to user failed on the path: $chow_path"
+ test_or_die "changing ownership to user failed on the path: $chow_path"
chow_path="$(dirname "$chow_path")"
done
}
#hmmm: why all the hard-coded paths below?
/bin/dd if=/dev/zero of=/var/swap.1 bs=1M count=1024
-test_or_fail "creating swap file"
+test_or_die "creating swap file"
/bin/chmod 600 /var/swap.1
-test_or_fail "setting swap file permissions"
+test_or_die "setting swap file permissions"
/sbin/mkswap /var/swap.1
-test_or_fail "formatting swap file as swap partition"
+test_or_die "formatting swap file as swap partition"
/sbin/swapon /var/swap.1
-test_or_fail "enabling new swap partition"
+test_or_die "enabling new swap partition"
free
source "$FEISTY_MEOW_SCRIPTS/core/launch_feisty_meow.sh"
sudo apt-get update -y
-test_or_fail "problem while doing 'apt-get update'"
+test_or_die "problem while doing 'apt-get update'"
# new magic to tell dpkg to go with existing config files. let's see if it works!
sudo apt-get dist-upgrade -y -o Dpkg::Options::="--force-confold"
-test_or_fail "problem while doing 'apt-get dist-upgrade'"
+test_or_die "problem while doing 'apt-get dist-upgrade'"