Skip to content
This repository has been archived by the owner. It is now read-only.
Permalink
chunk_stitching
Switch branches/tags

Name already in use

A tag already exists with the provided branch name. Many Git commands accept both tag and branch names, so creating this branch may cause unexpected behavior. Are you sure you want to create this branch?
Go to file
 
 
Cannot retrieve contributors at this time
executable file 389 lines (370 sloc) 11 KB
#!/bin/bash
set -e
. ./ssh_tools.sh
# Parse command line args
#
# Boolean flags toggle rerun/skip behaviour for individual pipeline stages;
# --uuid, --hostname, --sshcmd, --njobs and --max-queued take values.  Any
# argument that matches no known option is taken as the input video path.
#
# FIX: the original called `shift` in every case arm, but inside
# `for i in "$@"` the argument list is expanded once up front, so those
# shifts never affected the loop — they were dead code and are removed.
debug=false
rerun_registration=false
rerun_stitching=false
rerun_average=false
rerun_colourlabels=false
rerun_queenframe=false
rerun_cleanqueen=false
skip_chunks=false
skip_stitching=false
dry_run=false
keepfiles=false
max_queued=200
for i in "$@"; do
case $i in
--uuid=*)
uuid="${i#*=}"
;;
--rerun-registration)
rerun_registration=true
;;
--rerun-stitching)
rerun_stitching=true
;;
--rerun-average)
rerun_average=true
;;
--rerun-colourlabels)
rerun_colourlabels=true
;;
--rerun-queenframe)
rerun_queenframe=true
;;
--rerun-cleanqueen)
rerun_cleanqueen=true
;;
--skip-chunks)
skip_chunks=true
;;
--skip-stitching)
skip_stitching=true
;;
--hostname=*)
hostname="${i#*=}"
;;
--sshcmd=*)
sshcmd="${i#*=}"
;;
--debug)
debug=true
;;
--dry-run)
dry_run=true
;;
--njobs=*)
njobs="${i#*=}"
;;
--max-queued=*)
max_queued="${i#*=}"
;;
*)
# Anything unrecognised is the positional input video path.
input=$i
;;
esac
done
# A positional input path is mandatory; bail out before touching anything.
if [[ -z ${input+x} ]]; then
echo "input must be specified"
exit 1
fi
# With --debug, trace every command from here on.
if [[ $debug = true ]]; then set -x; fi
# Setup basic file stuff
# Derive the path pieces used throughout the script:
#   prefix   - input path without its extension
#   dirname  - directory containing the input
#   basename - file name portion of the prefix
#   basepath - the directory's own name (mirrored under the output root)
prefix=${input%.*}
dirname=$(dirname "$prefix")
basename=$(basename "$prefix")
basepath=$(basename "$dirname")
# Read config file
#
# "$prefix.cfg" holds NAME=VALUE lines that become shell variables.  The
# classifier/colourica entries are stored relative to the input video's
# directory, so they are resolved to full paths while being declared.
if [[ ! -f "$prefix.cfg" ]]; then
echo "No config file"
exit 1
fi
while IFS="=" read -r name value; do
case "$name" in
classifier|colourica)
# FIX: "$input" was unquoted in the original, so paths containing
# spaces were word-split inside $(dirname ...).
declare "$name=$(dirname "$input")/$value"
;;
*)
declare "$name=$value"
;;
esac
done < "$prefix.cfg"
# Parse local config file (per-user settings, same NAME=VALUE format;
# later reads override earlier ones).
while IFS="=" read -r name value; do
declare "$name=$value"
done < "$HOME/.cuttleline.cfg"
# Check if hostname or sshcmd was specified.  When only a hostname is
# given, fall back to a plain "ssh <hostname>" command.
if [[ -z ${sshcmd+x} ]]; then
if [[ -z ${hostname+x} ]]; then
echo "hostname or sshcmd must be specified"
exit 1
fi
sshcmd="ssh $hostname"
fi
# Parse host config file: pull NAME=VALUE settings from the remote host's
# ~/.cuttleline.cfg (again overriding anything read so far).
while IFS="=" read -r name value; do
declare "$name=$value"
done < <(retry_ssh "cat ~/.cuttleline.cfg")
# By now every one of these must be defined by one of the three config
# sources; fail fast, naming the first missing setting.
for name in code_dir working_dir mpicmd classifier colourica; do
if [[ -z ${!name+x} ]]; then
echo "$name must be specified"
exit 1
fi
done
# Generate path for output files
#
# FIX: the original line read `$outdir="$dirname/.."` — with $outdir unset
# that expands to the "command" `="…/.."` rather than an assignment, so the
# default output directory was never applied (and the line errored).
if [[ -z ${outdir+x} ]]; then
outdir="$dirname/.."
fi
# Mirror the input's directory name under the output root.
outdir="$outdir/$basepath"
mkdir -p "$outdir"
# Run chunking
#
# Split the input video into chunks according to "$prefix.chunks" and record
# one line per chunk — "start_frame end_frame video_start start_secs
# end_secs" — in $chunktimes.  Re-chunk whenever the chunk list or the video
# is newer than the existing chunktimes file.
chunks="$prefix.chunks"
chunktimes="$outdir/$basename.chunktimes"
if [[ ! -f "$chunktimes" ]] \
|| [[ "$chunktimes" -ot "$input" ]] \
|| [[ "$chunktimes" -ot "$chunks" ]]; then
echo "Chunking video"
# Delete old chunks and all per-chunk derived artefacts.
if [[ -f "$chunktimes" ]]; then
while read -r start_t end_t video_start; do
chunk="$outdir/$basename-$start_t-$end_t.mp4"
segmentation="$outdir/$basename-$start_t-$end_t.seg"
# FIX: the four *.log paths used to be assigned to the SAME variables as
# their non-log counterparts (segmentation, registration, ...), clobbering
# them, while the rm below referenced $segmentation_log etc. — which were
# never set.  Net effect: stale .log files were never deleted.
segmentation_log="$outdir/$basename-$start_t-$end_t.seg.log"
registration="$outdir/$basename-$start_t-$end_t.reg"
registration_log="$outdir/$basename-$start_t-$end_t.reg.log"
masterframe="$outdir/$basename-$start_t-$end_t.masterframe"
masterframe_log="$outdir/$basename-$start_t-$end_t.masterframe.log"
chunkaverage="$outdir/$basename-$start_t-$end_t.chunkaverage"
chunkaverage_log="$outdir/$basename-$start_t-$end_t.chunkaverage.log"
rm -f "$chunk" "$segmentation" "$segmentation_log" \
"$registration" "$registration_log" \
"$masterframe" "$masterframe_log" \
"$chunkaverage" "$chunkaverage_log"
done < "$chunktimes"
rm -f "$chunktimes"
fi
# Get chunking: read "start end" frame pairs from the helper via a FIFO.
pipe=$(mktemp -u)
mkfifo "$pipe" || exit 1
./chunking/output_chunktimes.py "$chunks" > "$pipe" &
pid=$!
start_times=()
end_times=()
while read -r start_t end_t; do
start_times+=("$start_t")
end_times+=("$end_t")
done < "$pipe"
rm "$pipe"
wait $pid || {
>&2 echo "Could not read chunktimes. Skipping video $input ..."
exit 0
}
# The frame rate is a property of the whole video, so probe it once here
# instead of re-running ffmpeg on every loop iteration (hoisted).
fps=$(ffmpeg -i "$input" 2>&1 \
| sed -n "s/.*, \(.*\) fp.*/\1/p")
# Process chunks
chunktimes_tmp=$(mktemp -u)
for i in "${!start_times[@]}"; do
start_t=${start_times[$i]}
end_t=${end_times[$i]}
chunk="$outdir/$basename-$start_t-$end_t.mp4"
start_secs=$(echo "$start_t $fps" \
| awk '{printf "%0.3f", $1 / $2}')
end_secs=$(echo "$end_t $fps" \
| awk '{printf "%0.3f", $1 / $2}')
if [[ ! -f "$chunk" ]] \
|| [[ "$chunk" -ot "$input" ]]; then
# NOTE(review): ffmpeg's -t takes a DURATION, but $end_secs is an
# absolute end time; combined with -ss/-copyts this may be deliberate
# over-shooting — confirm against downstream consumers.
ffmpeg \
-y \
-ss "$start_secs" \
-i "$input" \
-t "$end_secs" \
-codec copy \
-copyts \
-metadata comment="$start_secs:$end_secs:$input" \
-loglevel error \
"$chunk"
fi
video_start=$(ffmpeg -i "$chunk" 2>&1 \
| sed -n "s/.*, start: \(.*\),.*/\1/p")
echo "$start_t $end_t $video_start $start_secs $end_secs" >> "$chunktimes_tmp"
done
# Atomically publish the new chunktimes only after every chunk succeeded.
mv "$chunktimes_tmp" "$chunktimes"
fi
# Determine files to be transferred
#
# Build an rsync include-from filter: one "+ NAME" rule per artefact to
# sync, terminated by "- *" so everything else is excluded.  Rule order
# matters to rsync, so the loops below preserve the original ordering.
syncfiles="$prefix.syncfiles"
rm -f "$syncfiles"
# Per-chunk artefact extensions, in rule order.
chunk_exts=(seg seg.log reg reg.log masterframe masterframe.log \
chunkaverage chunkaverage.log cstitching cstitching.log)
while read -r start end video_start; do
if [[ $skip_chunks = false ]]; then
echo "+ $basename-$start-$end.mp4" >> "$syncfiles"
echo "+ $basename-$start-$end.mp4.log" >> "$syncfiles"
fi
for ext in "${chunk_exts[@]}"; do
echo "+ $basename-$start-$end.$ext" >> "$syncfiles"
done
done < "$chunktimes"
# Whole-video artefacts, in rule order.
for ext in chunktimes stitching stitching.log queenframe queenframe.log \
average average.log colourlabels colourlabels.log \
cleanqueen cleanqueen.log areas areas.log; do
echo "+ $basename.$ext" >> "$syncfiles"
done
echo "- *" >> "$syncfiles"
# Make working dir on remote
#
# Each run gets its own remote working directory keyed by a UUID, so
# concurrent runs do not collide; passing --uuid re-attaches to (or reuses)
# a previous job's directory instead of minting a new one.
if [[ -z ${uuid+x} ]]; then
uuid=$(uuidgen)
fi
echo "Job Id: $uuid"
working_dir="$working_dir/$uuid"
retry_ssh "mkdir -p $working_dir"
# The classifier/colourica files are given fixed names remotely —
# presumably run_pipeline_remote.sh expects these exact names; confirm.
echo "Sending shared files"
retry_rsync -ap "$classifier" :"$working_dir"/"classifier.clf"
retry_rsync -ap "$colourica" :"$working_dir"/"colourica.ica"
# Push only the artefacts selected by $syncfiles; --delete prunes remote
# copies of anything removed locally (e.g. by the re-chunking cleanup).
echo "Sending files"
retry_rsync -ap \
--include-from="$syncfiles" \
"$outdir"/ :"$working_dir" \
--delete
# Submit the pipeline driver on the remote host.
#
# The full remote command line is assembled locally as ONE string: each
# boolean flag is spliced in by a $(...) subshell that echoes the flag only
# when the corresponding option is enabled, and every --*-slurm/--*-args
# value comes from the merged config files read above.  NOTE(review):
# $background/$foreground are used here but never validated earlier —
# presumably they come from one of the config files; confirm.
echo "Submitting job"
cmd="$code_dir/run_pipeline_remote.sh \
--background=\"$background\" \
--foreground=\"$foreground\" \
$(if [[ ! -z ${partition+x} ]]; then
echo --partition="\"$partition\""
fi) \
$(if [[ $debug = true ]]; then
echo "--debug"
fi) \
$(if [[ $rerun_registration = true ]]; then
echo "--rerun-registration"
fi) \
$(if [[ $rerun_stitching = true ]]; then
echo "--rerun-stitching"
fi) \
$(if [[ $rerun_average = true ]]; then
echo "--rerun-average"
fi) \
$(if [[ $rerun_colourlabels = true ]]; then
echo "--rerun-colourlabels"
fi) \
$(if [[ $rerun_queenframe = true ]]; then
echo "--rerun-queenframe"
fi) \
$(if [[ $rerun_cleanqueen = true ]]; then
echo "--rerun-cleanqueen"
fi) \
$(if [[ $skip_chunks = true ]]; then
echo "--skip-chunks"
fi) \
$(if [[ $skip_stitching = true ]]; then
echo "--skip-stitching"
fi) \
--mpicmd=\"$mpicmd\" \
--segmentation-slurm=\"$segmentation_slurm\" \
--segmentation-args=\"$segmentation_args\" \
--registration-slurm=\"$registration_slurm\" \
--registration-args=\"$registration_args\" \
--masterframe-slurm=\"$masterframe_slurm\" \
--masterframe-args=\"$masterframe_args\" \
--chunkaverage-slurm=\"$chunkaverage_slurm\" \
--chunkaverage-args=\"$chunkaverage_args\" \
--chunk-stitching-slurm=\"$chunk_stitching_slurm\" \
--chunk-stitching-args=\"$chunk_stitching_args\" \
--stitching-slurm=\"$stitching_slurm\" \
--stitching-args=\"$stitching_args\" \
--queenframe-slurm=\"$queenframe_slurm\" \
--queenframe-args=\"$queenframe_args\" \
--average-slurm=\"$average_slurm\" \
--average-args=\"$average_args\" \
--colourlabels-slurm=\"$colourlabels_slurm\" \
--colourlabels-args=\"$colourlabels_args\" \
--cleanqueen-slurm=\"$cleanqueen_slurm\" \
--cleanqueen-args=\"$cleanqueen_args\" \
--areas-slurm=\"$areas_slurm\" \
--areas-args=\"$areas_args\" \
--max-queued=\"$max_queued\" \
\"$working_dir/$basename\""
# Ship $cmd to a remote shell on stdin and scrape the slurm job id from a
# "FINAL_JOBID=<id>" marker in its output.  The trailing if-statement runs
# INSIDE the command substitution: it converts PIPESTATUS[0] (the retry_ssh
# exit code, which sed would otherwise mask) into the substitution's exit
# status, so under `set -e` a failed submission aborts the script here.
jobid=$(retry_ssh "exec /bin/sh -s" <<< $cmd \
| sed -n 's/.*FINAL_JOBID=\(\w*\).*/\1/p'; \
if [[ "${PIPESTATUS[0]}" -eq "0" ]]; then true; else false; fi)
# Poll squeue until the submitted job id disappears from the queue; while
# it is still listed, pull back intermediate artefacts every 15 minutes.
echo "Waiting for job"
while :; do
queued_jobs=($(retry_ssh "squeue -o '%i' --noheader"))
job_running=false
for qid in "${queued_jobs[@]}"; do
[[ "$qid" = "$jobid" ]] && job_running=true
done
# Job no longer queued/running -> stop polling.
if [[ "$job_running" != true ]]; then
break
fi
retry_rsync -ap \
--include-from="$syncfiles" \
:"$working_dir"/ "$outdir" \
--delete
sleep 900
done
# Final sync now that the job has left the queue, so every artefact
# selected by $syncfiles comes back; --delete prunes stale local copies.
echo "Syncing back files ..."
retry_rsync -ap \
--include-from="$syncfiles" \
:"$working_dir"/ "$outdir" \
--delete
# FIX: $syncfiles is a regular file, so recursive -r was the wrong flag;
# -f also keeps `set -e` from aborting if the file is already gone.
rm -f "$syncfiles"
# Remove working dir
# NOTE(review): --dry-run only guards this remote cleanup, not the earlier
# submission/transfer steps — confirm that is the intended meaning.
echo "Cleaning up"
if [[ $dry_run = false ]] && [[ $keepfiles = false ]]; then
retry_ssh "rm -rf $working_dir"
fi