#------------------------------------------------------------------
#
# Purpose: A general purpose larsoft batch worker script.
#
# Adapted from condor_lBdetMC.sh by E. Church.
#
# Usage:
#
# condor_lar.sh [options]
#
# -c, --config <arg>       - Configuration (fcl) file (required).
# -s, --source <arg>       - Input file (full path).
# -S, --source-list <arg>  - Input file list (full path, one per line).
# -o, --output <arg>       - Output file name.
# -T, --TFileName <arg>    - TFile output file name.
# -n, --nevts <arg>        - Number of events to process.
# --nskip <arg>            - Number of events to skip.
# --nfile <arg>            - Number of files to process per worker.
# --nfile_skip <arg>       - Number of files to skip (use with option -S).
# --inputmode <arg>        - Input mode ('textfile' or '', default '').
# --args <args...>         - Arguments for lar command line (place at end).
#
# Sam and parallel project options.
#
# --sam_user <arg>         - Specify sam user (default $GRID_USER).
# --sam_group <arg>        - Specify sam group (default --group option).
# --sam_station <arg>      - Specify sam station (default --group option).
# --sam_defname <arg>      - Sam dataset definition name.
# --sam_project <arg>      - Sam project name.
# --sam_start              - Specify that this worker should be responsible for
#                            starting and stopping the sam project.
# --recur                  - Recursive input dataset (force snapshot).
# --sam_schema <arg>       - Use this option with argument "root" to stream files using
#                            xrootd. Leave this option out for standard file copy.
# --os <arg>               - A copy of the os argument passed to jobsub. May be used
#                            to affect the definition of UPS_OVERRIDE.
# --njobs <arg>            - Parallel project with specified number of jobs (default one).
# --data_file_type         - Specify data file type (default "root", repeatable).
#
# Mix input options (second input stream).
#
# --mix_defname <arg>      - Specify mix input sam dataset definition.
# --mix_project <arg>      - Specify mix input sam project.
#
# --declare                - Do sam declaration.
# --validate               - Do validation checks.
# --copy                   - Copy output files directly to FTS dropbox instead of
#                            the output directory.
# --maintain_parentage     - Recalculate sam parentage metadata for multistage jobs
#                            (use with --validate and --declare).
#
# --ups <arg>              - Comma-separated list of top-level run-time ups products.
# -r, --release <arg>      - Release tag.
# -q, -b, --build <arg>    - Release build qualifier ("debug" or "prof", default "debug").
# --localdir <arg>         - Larsoft local test release directory (default none).
# --localtar <arg>         - Tarball of local test release.
# --mrb                    - Ignored (for compatibility).
# --srt                    - Exit with error status (SRT run time no longer supported).
#
# -h, --help               - Print help.
# -i, --interactive        - For interactive use.
# -g, --grid               - No effect (allowed for compatibility).
# --group <arg>            - Group or experiment (required).
# --workdir <arg>          - No effect (allowed for compatibility).
# --outdir <arg>           - Output directory (required).
# --logdir <arg>           - Log directory (required).
# --dirsize <n>            - Maximum directory size.
# --dirlevels <n>          - Number of extra directory levels.
# --scratch <arg>          - Scratch directory (only for interactive).
# --cluster <arg>          - Job cluster (override $CLUSTER).
# --process <arg>          - Process within cluster (override $PROCESS).
# --procmap <arg>          - Name of process map file (override $PROCESS).
# --init-script <arg>      - User initialization script to execute.
# --init-source <arg>      - User initialization script to source (bash).
# --end-script <arg>       - User end-of-job script to execute.
# --mid-source <arg>       - User midstage initialization script to source.
# --mid-script <arg>       - User midstage finalization script to execute.
# --exe <arg>              - Specify art-like executable (default "lar").
# --init <path>            - Absolute path of environment initialization script.
#
# End options.
#
# Run time environment setup.
#
# MRB run-time environment setup is controlled by four options:
# --release (-r), --build (-b, -q), --localdir, and --localtar.
#
# a) Use option --release or -r to specify the version of the top-level product(s).
#
# b) Use option --build or -b to specify the build's full qualifiers (e.g.
#    "debug:e5" or "e5:prof").
#
# c) Options --localdir or --localtar are used to specify your local
#    test release. Use one or the other (not both).
#
#    Use --localdir to specify the location of your local install
#    directory ($MRB_INSTALL).
#
#    Use --localtar to specify the location of a tarball of your
#    install directory (made relative to $MRB_INSTALL).
#
#    Note that --localdir is not grid-friendly.
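#
#    As a sketch (paths are placeholders), a tarball suitable for
#    --localtar could be made from the install area like this:
#
#      tar -C $MRB_INSTALL -cf /path/to/local.tar .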
113 # 1. Each batch worker is uniquely identified by two numbers stored
114 # in environment variables $CLUSTER and $PROCESS (the latter is
115 # a small integer that starts from zero and varies for different
116 # jobs in a parallel job group). These environment variables are
117 # normally set by the batch system, but can be overridden by options
118 # --cluster, --process, and --procmap (e.g. to rerun failed jobs).
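#
#    For example, one might rerun a single failed job by supplying the
#    original identifiers by hand (values below are hypothetical):
#
#      condor_lar.sh -c job.fcl --cluster 1234567 --process 3 ...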
120 # 2. The work directory must be set to an existing directory owned
121 # by the submitter and readable by the batch worker. Files from the
122 # work directory are copied to the batch worker scratch directory at
123 # the start of the job.
125 # 3. A local test release may be specified as an absolute path using
126 # --localdir, or a tarball using --localtar. The location of the tarball
127 # may be specified as an absolute path visible on the worker, or a
128 # relative path relative to the work directory.
130 # 4. The output directory must exist and be writable by the batch
131 # worker (i.e. be group-writable for grid jobs). The worker
132 # makes a new subdirectory called ${CLUSTER}_${PROCESS} in the output
133 # directory and copies all files in the batch scratch directory there
134 # at the end of the job. If the output directory is not specified, the
135 # default is /grid/data/<group>/outstage/<user> (user is defined as
136 # owner of work directory).
# 5. Parallel projects are specified whenever --njobs is specified to
#    be greater than one. Parallel projects are supported for single file,
#    file list, and sam project input.
#
#    In all cases, each worker processes some number of complete files.
#    If the number of jobs is greater than the number of input files, some
#    workers will not have any input files to process.
#
#    In any case, options --nfile and --nevts can be used to limit the
#    number of files or events that are processed by a single worker,
#    regardless of the way files are divided among the workers.
#
#    Option --njobs is incompatible with options --nskip and --nfile_skip.
#    a) Non-sam (single file or file list) input.
#
#       In this case, input files are preassigned to workers such that all input
#       files are approximately evenly divided among the workers. All files
#       preassigned to this worker are copied to the scratch directory at the
#       start of the job.
#    b) Sam project input.
#
#       In this case, files are assigned to workers in a non-deterministic
#       manner by the sam system. The sam system fetches input files to the
#       scratch directory and deletes processed input files during job execution.
# 6. Using option -n or --nevts to limit the number of events processed:
#
#    a) If no input files are specified (e.g. mc generation), --nevts
#       specifies the total number of events among all workers.
#
#    b) If input files are specified, --nevts specifies the total number of
#       events processed by each worker or from each input file, whichever
#       limit is reached first.
# 7. The interactive option (-i or --interactive) allows this script
#    to be run interactively by overriding some settings that are normally
#    obtained from the batch system, including $CLUSTER, $PROCESS, and
#    the scratch directory. Interactive jobs always set PROCESS=0 (unless
#    overridden by --process).
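#
#    A minimal interactive invocation might look like this (all paths
#    and names are placeholders):
#
#      condor_lar.sh -i -c prod.fcl -s input.root --group <group> \
#                    --outdir /path/to/out --logdir /path/to/log \
#                    --scratch /path/to/scratch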
# 8. Mix options (--mix_defname, --mix_project) are only partially handled
#    in this script. These options are parsed and their values are stored
#    in shell variables. It is assumed that the sam project specified
#    by --mix_project has been started externally, unless --sam_start is
#    also specified, in which case this script will start the project.
#    This script does not include any provision for joining the project.
#    Further processing of these options (joining the sam project, generating
#    command line options or fcl wrappers) should be handled by user-provided
#    initialization scripts (--init-script, --init-source), as sketched below.
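#
#    For example, an --init-source script might join the externally
#    started mix project along these lines (a sketch only; the variable
#    names and the trailing establishProcess arguments are illustrative):
#
#      MIX_URL=`ifdh findProject $MIX_PROJECT $SAM_STATION`
#      MIX_CPID=`ifdh establishProcess $MIX_URL $APPNAME $REL $NODE $SAM_USER $APPFAMILY $DESC 0 $SAM_SCHEMA`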
# 9. Option --init <path> is optional. If specified, it should point to
#    the absolute path of the experiment environment initialization script,
#    which must be visible from the batch worker (e.g. /cvmfs/...).
#    If this option is not specified, this script will look for and source
#    a script with the hardwired name "setup_experiment.sh" in directory
#    ${CONDOR_DIR_INPUT}.
#
# Created: H. Greenlee, 29-Aug-2012
#
#------------------------------------------------------------------
declare -a DATAFILETYPES
while [ $# -gt 0 ]; do
awk '/^# Usage:/,/^# End options/{print $0}' $0 | cut -c3- | head -n -2
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
# Number of events to skip.
if [ $# -gt 1 ]; then
# Number of files to process.
if [ $# -gt 1 ]; then
# Number of files to skip.
if [ $# -gt 1 ]; then
# Number of parallel jobs.
if [ $# -gt 1 ]; then
# Specify data file types (repeatable).
if [ $# -gt 1 ]; then
ntype=${#DATAFILETYPES[@]}
DATAFILETYPES[$ntype]=$2
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
# Sam dataset definition name.
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
# Sam start/stop project flag.
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
# General arguments for lar command line.
if [ $# -gt 1 ]; then
# Top level ups products (comma-separated list).
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
# Release build qualifier.
if [ $# -gt 1 ]; then
# Local test release directory.
if [ $# -gt 1 ]; then
# Local test release tarball.
if [ $# -gt 1 ]; then
echo "SRT run time environment is no longer supported."
# Grid flag (no effect).
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
# Maximum directory size.
if [ $# -gt 1 ]; then
# Number of extra directory levels.
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
# Process within cluster.
if [ $# -gt 1 ]; then
if [ $# -gt 1 ]; then
# User initialization script.
if [ $# -gt 1 ]; then
# User initialization script to source.
if [ $# -gt 1 ]; then
# User end-of-job script.
if [ $# -gt 1 ]; then
# User midstage initialization source script.
if [ $# -gt 1 ]; then
# User midstage finalization script.
if [ $# -gt 1 ]; then
# Declare good output root files to SAM.
# Run validation steps in project.py on root outputs directly in the job.
# Copy output to FTS.
# Mix input sam dataset.
if [ $# -gt 1 ]; then
# Mix input sam project.
if [ $# -gt 1 ]; then
# Alter the output file's parentage such that its parent(s) are from the input list OR the sam process.
--maintain_parentage )
# Specify alternate art-like executable.
if [ $# -gt 1 ]; then
# Specify environment initialization script path.
if [ $# -gt 1 ]; then
echo "Unknown option $1"
#echo "INFILE=$INFILE"
#echo "INLIST=$INLIST"
#echo "OUTFILE=$OUTFILE"
#echo "NFILE_SKIP=$NFILE_SKIP"
#echo "LOCALDIR=$LOCALDIR"
#echo "LOCALTAR=$LOCALTAR"
#echo "INTERACTIVE=$INTERACTIVE"
#echo "OUTDIR=$OUTDIR"
#echo "LOGDIR=$LOGDIR"
#echo "SCRATCH=$SCRATCH"
#echo "INITSCRIPT=$INITSCRIPT"
#echo "INITSOURCE=$INITSOURCE"
#echo "ENDSCRIPT=$ENDSCRIPT"
#echo "MIDSOURCE=$MIDSOURCE"
#echo "MIDSCRIPT=$MIDSCRIPT"
#echo "VALIDATE_IN_JOB=$VALIDATE_IN_JOB"
# Set default data file types ("root").
if [ ${#DATAFILETYPES[@]} -eq 0 ]; then
DATAFILETYPES[0]=root
# Done with arguments.
echo "Nodename: `hostname -f`"
if [ x$QUAL = x ]; then
if [ x$SAM_GROUP = x ]; then
if [ x$SAM_STATION = x ]; then
# Standardize sam_schema (xrootd -> root, xroot -> root).
if [ x$SAM_SCHEMA = xxrootd ]; then
if [ x$SAM_SCHEMA = xxroot ]; then
# Fix for sites with newer linux kernels:
# Do this only if OS is exclusively requested as SL6.
#if [ x$OS = xSL6 ]; then
# 3.*) export UPS_OVERRIDE="-H Linux64bit+2.6-2.12";;
# 4.*) export UPS_OVERRIDE="-H Linux64bit+2.6-2.12";;
echo "uname -r: `uname -r`"
echo "UPS_OVERRIDE: $UPS_OVERRIDE"
echo "Condor dir input: $CONDOR_DIR_INPUT"
# Initialize experiment ups products and mrb.
echo "Initializing ups and mrb."
if [ x$INIT != x ]; then
if [ ! -f $INIT ]; then
echo "Environment initialization script $INIT not found."
echo "Sourcing $INIT"
echo "Sourcing setup_experiment.sh"
source ${CONDOR_DIR_INPUT}/setup_experiment.sh
echo PRODUCTS=$PRODUCTS
echo "ups flavor: `ups flavor`"
# Set GROUP environment variable.
if [ x$GRP != x ]; then
echo "GROUP not specified."
# Set options for ifdh.
echo "X509_USER_PROXY = $X509_USER_PROXY"
echo "IFDH_OPT=$IFDH_OPT"
# Make sure fcl file argument was specified.
if [ x$FCL = x ]; then
echo "No configuration option (-c|--config) was specified."
# Make sure output directory exists and is writable.
if [ x$OUTDIR = x ]; then
echo "Output directory not specified."
echo "Output directory: $OUTDIR"
# Make sure log directory exists and is writable.
if [ x$LOGDIR = x ]; then
echo "Log directory not specified."
echo "Log directory: $LOGDIR"
# Make sure the scratch directory is defined.
# For batch, the scratch directory is always $_CONDOR_SCRATCH_DIR.
# For interactive, the scratch directory is specified by option
# --scratch or --outdir.
if [ $INTERACTIVE -eq 0 ]; then
SCRATCH=$_CONDOR_SCRATCH_DIR
if [ x$SCRATCH = x ]; then
if [ x$SCRATCH = x -o ! -d "$SCRATCH" -o ! -w "$SCRATCH" ]; then
echo "Local scratch directory not defined or not writable."
# Create the scratch directory in the condor scratch directory.
# Copied from condor_lBdetMC.sh.
# Scratch directory path is stored in $TMP.
# Scratch directory is automatically deleted when shell exits.
# Do not change this section.
# It creates a temporary working directory that automatically cleans up all
# leftover files at the end.
TMP=`mktemp -d ${SCRATCH}/working_dir.XXXXXXXXXX`
TMP=${TMP:-${SCRATCH}/working_dir.$$}
{ [[ -n "$TMP" ]] && mkdir -p "$TMP"; } || \
{ echo "ERROR: unable to create temporary directory!" 1>&2; exit 1; }
trap "[[ -n \"$TMP\" ]] && { rm -rf \"$TMP\"; }" 0
# End of the section you should not change.
echo "Scratch directory: $TMP"
# Copy files from the condor input directory to the scratch directory.
echo "No longer fetching files from work directory."
echo "That is now done using jobsub -f commands."
cp ${CONDOR_DIR_INPUT}/* ./work/
find . -name \*.tar -exec tar xf {} \;
find . -name \*.py -exec chmod +x {} \;
find . -name \*.sh -exec chmod +x {} \;
echo "Local working directory:"
# Save the hostname and condor job id.
hostname > hostname.txt
echo ${CLUSTER}.${PROCESS} > jobid.txt
# Set default CLUSTER and PROCESS environment variables for interactive jobs.
if [ $INTERACTIVE -ne 0 ]; then
CLUSTER=`date +%s`   # From time stamp.
PROCESS=0            # Default zero for interactive.
# Override CLUSTER and PROCESS from command line options.
if [ x$CLUS != x ]; then
if [ x$PROC != x ]; then
if [ x$PROCMAP != x ]; then
if [ -f $PROCMAP ]; then
PROCESS=`sed -n $(( $PROCESS + 1 ))p $PROCMAP`
echo "Process map file $PROCMAP not found."
if [ x$CLUSTER = x ]; then
echo "CLUSTER not specified."
if [ x$PROCESS = x ]; then
echo "PROCESS not specified."
echo "Procmap: $PROCMAP"
echo "Cluster: $CLUSTER"
echo "Process: $PROCESS"
# Construct name of output subdirectory.
while [ $DIRLEVELS -gt 0 -a $DIRSIZE -gt 0 ]; do
parentdir=$(( $ndir % $DIRSIZE ))/$parentdir
ndir=$(( $ndir / $DIRSIZE ))
DIRLEVELS=$(( $DIRLEVELS - 1 ))
OUTPUT_SUBDIR=${parentdir}${CLUSTER}_${PROCESS}
echo "Output subdirectory: $OUTPUT_SUBDIR"
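# For example (assuming ndir is seeded from $PROCESS), options
# --dirsize 100 --dirlevels 1 with PROCESS=1234 give parentdir="34/"
# and hence OUTPUT_SUBDIR="34/${CLUSTER}_1234".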
# Make sure fcl file exists.
if [ ! -f $FCL ]; then
echo "Configuration file $FCL does not exist."
# Make sure init script exists and is executable (if specified).
if [ x$INITSCRIPT != x ]; then
if [ -f "$INITSCRIPT" ]; then
echo "Initialization script $INITSCRIPT does not exist."
# Make sure init source script exists (if specified).
if [ x$INITSOURCE != x -a ! -f "$INITSOURCE" ]; then
echo "Initialization source script $INITSOURCE does not exist."
# Make sure end-of-job script exists and is executable (if specified).
if [ x$ENDSCRIPT != x ]; then
if [ -f "$ENDSCRIPT" ]; then
echo "Finalization script $ENDSCRIPT does not exist."
# Make sure midstage init source script exists (if specified).
if [ x$MIDSOURCE != x -a ! -f "$MIDSOURCE" ]; then
echo "Midstage initialization source script $MIDSOURCE does not exist."
# Make sure midstage finalization script exists and is executable (if specified).
if [ x$MIDSCRIPT != x ]; then
if [ -f "$MIDSCRIPT" ]; then
echo "Midstage finalization script $MIDSCRIPT does not exist."
# MRB run time environment setup goes here.
# Setup local test release, if any.
if [ x$LOCALDIR != x ]; then
# Copy test release directory recursively.
echo "Copying local test release from directory ${LOCALDIR}."
# Make sure ifdhc is set up.
if [ x$IFDHC_DIR = x ]; then
echo "Setting up ifdhc before fetching local directory."
echo "IFDHC_DIR=$IFDHC_DIR"
ifdh cp -r $IFDH_OPT $LOCALDIR .
if [ $stat -ne 0 ]; then
echo "ifdh cp failed with status ${stat}."
find . -name \*.py -exec chmod +x {} \;
find . -name \*.sh -exec chmod +x {} \;
# Setup the environment.
echo "Initializing localProducts from ${LOCALDIR}."
if [ ! -f $TMP/local/setup ]; then
echo "Local test release directory $LOCALDIR does not contain a setup script."
sed "s@setenv MRB_INSTALL.*@setenv MRB_INSTALL ${TMP}/local@" $TMP/local/setup | \
sed "s@setenv MRB_TOP.*@setenv MRB_TOP ${TMP}@" > $TMP/local/setup.local
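# The sed pipeline above rewrites lines like
#   setenv MRB_INSTALL /original/path
# to point at the copied area, e.g.
#   setenv MRB_INSTALL ${TMP}/local
# so that the relocated setup script works on the worker.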
# Make sure we have the correct version of mrb setup.
if grep -q bin/shell_independence $TMP/local/setup.local; then
# This is an old style working area.
# Set up old version of mrb.
echo "Setting up old version of mrb."
. $TMP/local/setup.local
#echo "MRB_INSTALL=${MRB_INSTALL}."
#echo "MRB_QUALS=${MRB_QUALS}."
#echo "Setting up all localProducts."
#if [ x$IFDHC_DIR != x ]; then
# Setup local larsoft test release from tarball.
if [ x$LOCALTAR != x ]; then
# Fetch the tarball.
echo "Fetching test release tarball ${LOCALTAR}."
# Make sure ifdhc is set up.
if [ x$IFDHC_DIR = x ]; then
echo "Setting up ifdhc before fetching tarball."
echo "IFDHC_DIR=$IFDHC_DIR"
ifdh cp $LOCALTAR local.tar
if [ $stat -ne 0 ]; then
echo "ifdh cp failed with status ${stat}."
# Extract the tarball.
# Setup the environment.
echo "Initializing localProducts from tarball ${LOCALTAR}."
sed "s@setenv MRB_INSTALL.*@setenv MRB_INSTALL ${TMP}/local@" $TMP/local/setup | \
sed "s@setenv MRB_TOP.*@setenv MRB_TOP ${TMP}@" > $TMP/local/setup.local
# Make sure we have the correct version of mrb setup.
if grep -q bin/shell_independence $TMP/local/setup.local; then
# This is an old style working area.
# Set up old version of mrb.
echo "Setting up old version of mrb."
. $TMP/local/setup.local
#echo "MRB_INSTALL=${MRB_INSTALL}."
#echo "MRB_QUALS=${MRB_QUALS}."
#echo "Setting up all localProducts."
#if [ x$IFDHC_DIR != x ]; then
# Setup specified version of top level run time products
# (if specified, and if the local test release did not set them up).
for prd in `echo $UPS_PRDS | tr , ' '`
if ! ups active | grep -q $prd; then
echo "Setting up $prd $REL -q ${QUAL}."
if [ x$IFDHC_DIR != x -a x$IFBEAM_DIR = x ]; then
setup $prd $REL -q $QUAL
# In case mrb setup didn't set up a version of ifdhc, set up ifdhc again.
if [ x$IFDHC_DIR = x ]; then
echo "Setting up ifdhc again, because larsoft did not set it up."
echo "IFDH_ART_DIR=$IFDH_ART_DIR"
echo "IFDHC_DIR=$IFDHC_DIR"
# Run/source optional initialization scripts.
if [ x$INITSCRIPT != x ]; then
echo "Running initialization script ${INITSCRIPT}."
if [ $status -ne 0 ]; then
if [ x$INITSOURCE != x ]; then
echo "Sourcing initialization source script ${INITSOURCE}."
if [ $status -ne 0 ]; then
# Save a copy of the environment, which can be helpful for debugging.
# Get input files to process: either single file, file list, or sam.
#
# For non-sam, non-xrootd input, copy all files locally using ifdh cp, and make a
# local file list called condor_lar_input.list. Save the remote file names (URIs)
# in another file called transferred_uris.list.
#
# For non-sam xrootd input ("--sam_schema root"), convert the input list to xrootd URIs,
rm -f condor_lar_input.list
rm -f transferred_uris.list
aunt_files=() # For data overlay, the data files being brought in are the output's aunts.
if [ $USE_SAM -eq 0 -a x$INFILE != x ]; then
# Don't allow any list-related options in the single file case:
# -S, --source-list, --nfile, --nfile_skip
if [ x$INLIST != x -o $NFILE -ne 0 -o $NFILE_SKIP -ne 0 ]; then
echo "File list options specified with single input file."
# Set the parent file to be the input file.
parent_files=("${parent_files[@]}" $INFILE)
# Copy input file to scratch directory or convert to xrootd url.
if [ x$SAM_SCHEMA = xroot ]; then
XROOTD_URI=`file_to_url.sh $INFILE`
if [ $XROOTD_URI != $INFILE ]; then
echo $INFILE > transferred_uris.list
echo $XROOTD_URI > condor_lar_input.list
echo "Input xrootd uri: $XROOTD_URI"
LOCAL_INFILE=`basename $INFILE`
echo "Copying $INFILE"
ifdh cp $INFILE $LOCAL_INFILE
if [ $stat -ne 0 ]; then
echo "ifdh cp failed with status ${stat}."
if [ -f $LOCAL_INFILE -a $stat -eq 0 ]; then
echo $INFILE > transferred_uris.list
echo $LOCAL_INFILE > condor_lar_input.list
echo "Error fetching input file ${INFILE}."
elif [ $USE_SAM -eq 0 -a x$INLIST != x ]; then
# Make sure input file list exists.
if [ ! -f $INLIST ]; then
echo "Input file list $INLIST does not exist."
# Remember how many files are in the input file list.
NFILE_TOTAL=`cat $INLIST | wc -l`
echo "Input file list contains $NFILE_TOTAL total files."
# Clamp the total number of files to be a maximum of NFILE * NJOBS, where
# NFILE and NJOBS are specified via command line options. In project.py
# terms, NFILE is <maxfilesperjob> and NJOBS is <numjobs>.
MAX_TOTAL=$(( $NFILE * $NJOBS ))
if [ $MAX_TOTAL -gt 0 -a $NFILE_TOTAL -gt $MAX_TOTAL ]; then
NFILE_TOTAL=$MAX_TOTAL
echo "Number of files to be processed will be limited to ${NFILE_TOTAL}."
# If --njobs was specified, calculate how many files
# to skip and process in this worker.
if [ $NJOBS -ne 0 ]; then
# Don't allow option --nfile_skip in this case.
if [ $NFILE_SKIP -ne 0 ]; then
echo "Illegal options specified with --njobs."
# Clamp NJOBS to be a maximum of $NFILE_TOTAL.
# This means that workers with $PROCESS >= $NFILE_TOTAL will not have
# any input files to process.
if [ $MYNJOBS -gt $NFILE_TOTAL ]; then
MYNJOBS=$NFILE_TOTAL
# Calculate number of files to skip and number of files to process.
NFILE_SKIP=$(( $PROCESS * $NFILE_TOTAL / $MYNJOBS ))
MYNFILE=$(( ( $PROCESS + 1 ) * $NFILE_TOTAL / $MYNJOBS - $NFILE_SKIP ))
if [ $MYNFILE -eq 0 -o $NFILE_SKIP -ge $NFILE_TOTAL ]; then
echo "This worker did not get any input files."
if [ $MYNFILE -lt $NFILE -o $NFILE -eq 0 ]; then
# Report number of files to skip and process.
echo "Skipping $NFILE_SKIP files."
if [ $NFILE -eq 0 ]; then
echo "Processing all remaining files."
echo "Processing $NFILE files."
# Copy input files and construct local input file list.
while read infile; do
if [ $nfskip -gt 0 ]; then
nfskip=$(( $nfskip - 1 ))
# Retain the original file name as the local file name, if possible.
# Otherwise, generate a new (hopefully) unique name.
if [ ! -f condor_lar_input.list ]; then
touch condor_lar_input.list
if [ x$SAM_SCHEMA = xroot ]; then
XROOTD_URI=`file_to_url.sh $infile`
if [ $XROOTD_URI != $infile ]; then
echo $infile >> transferred_uris.list
echo $XROOTD_URI >> condor_lar_input.list
echo "Input xrootd uri: $XROOTD_URI"
LOCAL_INFILE=`basename $infile`
if grep -q $LOCAL_INFILE condor_lar_input.list; then
LOCAL_INFILE=input${nfile}.root
if [ "$INMODE" = 'textfile' ]; then
LOCAL_INFILE=input${nfile}.txt
echo "Copying $infile"
ifdh cp $infile $LOCAL_INFILE
if [ $stat -ne 0 ]; then
echo "ifdh cp failed with status ${stat}."
if [ -f $LOCAL_INFILE -a $stat -eq 0 ]; then
echo $infile >> transferred_uris.list
echo $LOCAL_INFILE >> condor_lar_input.list
parent_files=("${parent_files[@]}" $LOCAL_INFILE)
echo "Error fetching input file ${infile}."
nmax=$(( $nmax - 1 ))
if [ $nmax -eq 0 ]; then
nfile=$(( $nfile + 1 ))
if [ $USE_SAM -eq 0 -a x$SAM_SCHEMA != xroot ]; then
if [ -f condor_lar_input.list ]; then
# Sort input list by decreasing size so we don't get a file with
# zero events as the first file.
#ls -S1 `cat condor_lar_input.list` > condor_lar_input.list
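# "ls -s1" prints "<size> <name>" for each file, sort -nr orders the
# lines by decreasing size, and awk keeps only the file names.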
xargs ls -s1 < condor_lar_input.list | sort -nr | awk '{print $2}' > newcondor_lar_input.list
mv -f newcondor_lar_input.list condor_lar_input.list
echo "Local input file list:"
cat condor_lar_input.list
NFILE_LOCAL=`cat condor_lar_input.list | wc -l`
echo "No local input files."
echo "Local input list has $NFILE_LOCAL files."
#Break the master wrapper fcl into one fcl file per stage.
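#A master wrapper fcl is expected to look something like this (illustrative):
#
#  #---STAGE 0
#  <fcl for stage 0>
#  #---END_STAGE
#  #---STAGE 1
#  <fcl for stage 1>
#  #---END_STAGE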
if [ "$(echo $line | awk '{print $1}')" = "#---STAGE" ]; then
stage="$(echo $line | awk '{print $2}')"
stage_fcl="Stage$stage.fcl"
nfcls=$(( $nfcls + 1 ))
if [ "$line" = "#---END_STAGE" ]; then
echo $line >> $stage_fcl
#We now have nStage fcl files, each of which needs to be run serially.
echo "Start loop over stages"
while [ $stage -lt $nfcls ]; do
FCL="Stage$stage.fcl"
# In case no input files were specified, and we are not getting input
# from sam (i.e. mc generation), recalculate the subrun number and the
# number of events to generate in this worker.
# This also applies to the textfile inputmode.
# Note this only applies to the first stage, by definition.
if [ $stage -eq 0 -a $USE_SAM -eq 0 ] && [ $NFILE_TOTAL -eq 0 -o "$INMODE" = 'textfile' ]; then
# Don't allow --nskip.
if [ $NSKIP -gt 0 ]; then
echo "Illegal option --nskip specified with no input."
NSKIP=$(( $PROCESS * $NEVT / $NJOBS ))
NEV=$(( ( $PROCESS + 1 ) * $NEVT / $NJOBS - $NSKIP ))
# Set subrun=$PROCESS+1 in a wrapper fcl file.
SUBRUN=$(( $PROCESS + 1 ))
cat <<EOF > subrun_wrapper.fcl
source.firstSubRun: $SUBRUN
if [ "$INMODE" = 'textfile' ]; then
if [ $NFILE_LOCAL -ne 1 ]; then
echo "Text file input mode specified with wrong number of input files."
echo "physics.producers.generator.InputFileName: \"`cat condor_lar_input.list`\"" >> subrun_wrapper.fcl
FCL=subrun_wrapper.fcl
echo "MC subrun: $SUBRUN"
echo "Number of MC events: $NEVT"
# Sam stuff for main input.
if [ $USE_SAM -ne 0 -a $stage -eq 0 ]; then
# Make sure a project name has been specified.
if [ x$SAM_PROJECT = x ]; then
echo "No sam project was specified."
echo "Sam project: $SAM_PROJECT"
# Start project (if requested).
if [ $SAM_START -ne 0 ]; then
if [ x$SAM_DEFNAME != x ]; then
# Do some preliminary tests on the input dataset definition.
# If the dataset definition returns zero files at this point, abort the job.
# If the dataset definition returns too many files compared to --nfile, create
# a new dataset definition by adding a "with limit" clause.
nf=`ifdh translateConstraints "defname: $SAM_DEFNAME" | wc -l`
if [ $nf -eq 0 ]; then
echo "Input dataset $SAM_DEFNAME is empty."
if [ $NFILE -ne 0 -a $nf -gt $NFILE ]; then
limitdef=${SAM_PROJECT}_limit_$NFILE
# Check whether the limit def already exists.
# Have to parse the command output because ifdh returns the wrong status.
existdef=`ifdh describeDefinition $limitdef 2>/dev/null | grep 'Definition Name:' | wc -l`
if [ $existdef -gt 0 ]; then
echo "Using already created limited dataset definition ${limitdef}."
ifdh createDefinition $limitdef "defname: $SAM_DEFNAME with limit $NFILE" $SAM_USER $SAM_GROUP
# Assume the command worked, because it returns the wrong status.
echo "Created limited dataset definition ${limitdef}."
# If we get to here, we know that we want to use $limitdef instead of $SAM_DEFNAME
# as the input sam dataset definition.
SAM_DEFNAME=$limitdef
# If the recursive flag is set, take a snapshot of the input dataset.
if [ $RECUR -ne 0 ]; then
echo "Forcing snapshot"
SAM_DEFNAME=${SAM_DEFNAME}:force
# Start the project.
echo "Starting project $SAM_PROJECT using sam dataset definition $SAM_DEFNAME"
ifdh startProject $SAM_PROJECT $SAM_STATION $SAM_DEFNAME $SAM_USER $SAM_GROUP
if [ $? -eq 0 ]; then
echo "Start project succeeded."
echo "Start project failed."
if [ x$SAM_DEFNAME = x ]; then
echo "Start project requested, but no definition was specified."
# Get the project url of a running project (maybe the one we just started,
# or maybe started externally). This command has to succeed, or we can't
PURL=`ifdh findProject $SAM_PROJECT $SAM_STATION`
if [ x$PURL = x ]; then
echo "Unable to find url for project ${SAM_PROJECT}."
echo "Project url: $PURL"
# Start the consumer process. This command also has to succeed.
# Parse the fcl file to extract process_name, and use that
# as the application name for starting the consumer process.
APPNAME=`fhicl-dump $FCL | grep process_name: | head -1 | tr -d '"' | awk '{print $2}'`
if [ $? -ne 0 ]; then
echo "fhicl-dump $FCL failed to run. May be missing a ups product, library, or fcl file."
if [ x$APPNAME = x ]; then
echo "Trouble determining application name."
# Make sure the release version is not empty, or the ifdh command line will be messed up.
if [ x$REL = x ]; then
# Make the description, which is conventionally the jobsub job id.
# This can not be empty.
if [ x$DESC = x ]; then
echo "Starting consumer process."
echo "ifdh establishProcess $PURL $APPNAME $REL $NODE $SAM_USER $APPFAMILY $DESC $NFILE $SAM_SCHEMA"
CPID=`ifdh establishProcess $PURL $APPNAME $REL $NODE $SAM_USER $APPFAMILY $DESC $NFILE $SAM_SCHEMA`
if [ x$CPID = x ]; then
echo "Unable to start consumer process for project url ${PURL}."
echo "Consumer process id $CPID"
# Stash away the project name and consumer process id in case we need them
# later for bookkeeping.
echo $SAM_PROJECT > sam_project.txt
echo $CPID > cpid.txt
# Sam stuff for secondary input.
if [ $MIX_SAM -ne 0 ]; then
echo "Setting up mix sam input."
# Make sure a project name has been specified.
if [ x$MIX_PROJECT = x ]; then
echo "No mix sam project was specified."
echo "Mix project: $MIX_PROJECT"
# Start mix project (if requested).
if [ $SAM_START -ne 0 ]; then
if [ x$MIX_DEFNAME != x ]; then
echo "Starting project $MIX_PROJECT using sam dataset definition $MIX_DEFNAME"
ifdh startProject $MIX_PROJECT $SAM_STATION $MIX_DEFNAME $SAM_USER $SAM_GROUP
if [ $? -eq 0 ]; then
echo "Start project succeeded."
echo "Start project failed."
if [ x$MIX_DEFNAME = x ]; then
echo "Start project requested, but no mix definition was specified."
#Figure out output file names.
#If outfile is not defined and we are inputting a single file or file list, follow the
#convention that the output file should be %inputfilename_%systemtime_stage.root
# Construct options for lar command line.
LAROPT="-c $FCL --rethrow-default"
echo "Laropt: $LAROPT"
if [ -f condor_lar_input.list -a $stage -eq 0 ]; then
if [ "$INMODE" != 'textfile' ]; then
LAROPT="$LAROPT -S condor_lar_input.list" # artroot files to read in
#AOUTFILE=`cat condor_lar_input.list`
# Extract the output file name for this stage.
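# A colon-separated --output value assigns one name per stage; for
# example (illustrative), OUTFILE="gen.root:reco.root" selects gen.root
# as the stage 0 name and reco.root as the stage 1 name (the stage
# number is appended to the file stem when the -o option is built below).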
if echo $OUTFILE | grep -q :; then
field=$(( $stage + 1 ))
outfile_stage=`echo $OUTFILE | cut -d: -f$field`
if [ x$outfile_stage != x ]; then
outfile=$outfile_stage
if [ x$outfile != x ]; then
LAROPT="$LAROPT -o `basename $outfile .root`$stage.root"
outstem=`basename $OUTFILE .root`
if [ x$TFILE != x ]; then
LAROPT="$LAROPT -T $TFILE"
if [ $NEVT -ne 0 ]; then
LAROPT="$LAROPT -n $NEVT"
if [ $NSKIP -ne 0 ]; then
LAROPT="$LAROPT --nskip $NSKIP"
if [ x$PURL != x -a $stage -eq 0 ]; then
LAROPT="$LAROPT --sam-web-uri $PURL"
if [ x$CPID != x -a $stage -eq 0 ]; then
LAROPT="$LAROPT --sam-process-id $CPID"
if [ -n "$ARGS" ]; then
LAROPT="$LAROPT $ARGS"
# Source optional midstage initialization scripts.
if [ x$MIDSOURCE != x ]; then
echo "Sourcing midstage initialization source script ${MIDSOURCE}."
if [ $status -ne 0 ]; then
if [ $stage -ne 0 ]; then
LAROPT="$LAROPT -s $next_stage_input"
# Save a copy of the environment, which can be helpful for debugging.
env > env${stage}.txt
# Save a canonicalized version of the fcl configuration.
fhicl-dump $FCL > cfgStage$stage.fcl
# Dump proxy information.
voms-proxy-info -all
# Extract this stage's exe.
if echo $EXE | grep -q :; then
field=$(( $stage + 1 ))
exe_stage=`echo $EXE | cut -d: -f$field`
if [ x$exe_stage != x ]; then
echo "$exe $LAROPT" > commandStage$stage.txt
$exe $LAROPT > larStage$stage.out 2> larStage$stage.err
echo $stat > larStage$stage.stat
echo "$exe completed with exit status ${stat}."
if [ $stat -ne 0 ]; then
voms-proxy-info -all
echo "tail -1000 larStage$stage.out"
tail -1000 larStage$stage.out
echo "tail -1000 larStage$stage.err"
tail -1000 larStage$stage.err
if [ $USE_SAM -ne 0 -a $stage -eq 0 ]; then
# Get list of consumed files.
if [ x$CPID = x -a -f cpid.txt ]; then
ifdh translateConstraints "consumer_process_id $CPID and consumed_status consumed" > consumed_files.list
# End consumer process.
ifdh endProcess $PURL $CPID
# Stop project (if appropriate).
nprj=`ifdh translateConstraints "snapshot_for_project_name $SAM_PROJECT" | wc -l`
nconsumed=`ifdh translateConstraints "project_name $SAM_PROJECT and consumed_status consumed" | wc -l`
echo "$nprj files in project, $nconsumed files consumed so far."
if [ $SAM_START -ne 0 -o \( $nprj -gt 0 -a $nconsumed -eq $nprj \) ]; then
echo "Stopping project."
ifdh endProject $PURL
#If lar returns a status other than 0, do not move on to later stages.
if [ $stat -ne 0 ]; then
# Run optional midstage script.
if [ x$MIDSCRIPT != x ]; then
echo "Running midstage finalization script ${MIDSCRIPT}."
./${MIDSCRIPT} $stage
if [ $status -ne 0 ]; then
# Delete temporary input file.
if [ $stage -ne 0 ]; then
rm -rf $next_stage_input
#echo `ls -t1 *.root | egrep -v 'hist|larlite|larcv' | head -n1`
#echo "Outfile is $OUTFILE"
next_stage_input=`ls -t1 *.root | egrep -v 'celltree|hist|larlite|larcv|Supplemental|TGraphs' | head -n1`
# Don't let the file name get too long.
nc=`echo $next_stage_input | wc -c`
if [ $nc -ge 200 ]; then
base=`basename $next_stage_input`
newstem=`echo $stem | cut -c1-150`_`uuidgen`
echo "mv $next_stage_input ${newstem}.${ext}"
mv $next_stage_input ${newstem}.${ext}
next_stage_input=${newstem}.${ext}
mixed_files=`sam_metadata_dumper $next_stage_input | grep mixparent | awk -F ":" '{gsub("\"" ,""); gsub(",",""); gsub(" ",""); print $2}' | sort -u`
if [ x"$mixed_files" != x ]; then
aunt_files=("${aunt_files[@]}" $mixed_files)
#Rename the mem and time profile DBs by stage.
if [ -f time.db ]; then
mv time.db time$stage.db
if [ -f mem.db ]; then
mv mem.db mem$stage.db
# Done looping over stages.
# Secondary sam cleanups.
if [ $MIX_SAM -ne 0 ]; then
# Stop project (if appropriate).
if [ $SAM_START -ne 0 ]; then
echo "Stopping project."
MURL=`ifdh findProject $MIX_PROJECT $SAM_STATION`
ifdh endProject $MURL
# Delete input files.
if [ $USE_SAM -eq 0 -a x$SAM_SCHEMA != xroot -a -f condor_lar_input.list ]; then
done < condor_lar_input.list
# Run optional end-of-job script.
if [ x$ENDSCRIPT != x ]; then
echo "Running end-of-job script ${ENDSCRIPT}."
if [ $status -ne 0 ]; then
# Do root file checks.
# Randomize names of data files that have a corresponding json file.
# These are normally histogram files. Art files do not have external
# json metadata at this point.
# Also randomize the names of data files if there is no input specified
# for this job (i.e. generator jobs).
# Also randomize and shorten the names of data files that are longer than
# 200 characters.
if [ $USE_SAM -eq 0 -a x$INFILE = x -a x$INLIST = x ]; then
for ftype in ${DATAFILETYPES[*]}; do
for datafile in *.${ftype}; do
if [ -f $datafile ]; then
nc=`echo $datafile | wc -c`
if [ -f ${datafile}.json -o $ran != 0 -o $nc -ge 200 ]; then
base=`basename $datafile`
newstem=`echo $stem | cut -c1-150`_`uuidgen`
echo "mv $datafile ${newstem}.${ext}"
mv $datafile ${newstem}.${ext}
if [ -f ${datafile}.json ]; then
mv ${datafile}.json ${newstem}.${ext}.json
# Calculate root metadata for all data files and save as a json file.
# If json metadata already exists, merge it with the newly generated root metadata.
for ftype in ${DATAFILETYPES[*]}; do
for datafile in *.${ftype}; do
if [ -f $datafile ]; then
json=${datafile}.json
if [ -f $json ]; then
./root_metadata.py --output="${json}2" "$datafile" >& /dev/null
./merge_json.py $json ${json}2 > ${json}3
mv -f ${json}3 $json
./root_metadata.py --output="$json" "$datafile" >& /dev/null
#Create a master lar.stat file containing the overall exit code of all stages.
while [ $stageStat -lt $nfcls ]; do
stat=`cat larStage$stageStat.stat`
if [[ "$stat" = 65 && $ART_VERSION < v2_01 ]]; then
# Workaround for the TimeTracker crash bug for input files with zero events.
for json in *.json; do
if grep -q '"events": *"0"' $json; then
overallStat=$(( stat + overallStat ))
#Do some cleanup of intermediate files.
#rm Stage$stageStat.fcl
stageStat=$(( stageStat + 1 ))
echo $overallStat > lar.stat
valstat=$overallStat
# Make local output directories for files that we have to save.
# Stash all of the files we want to save in the local directories that we just created.
# First move data files and corresponding .json files into the out and log subdirectories.
for ftype in ${DATAFILETYPES[*]}; do
for datafile in *.${ftype}; do
if [ -f $datafile ]; then
if [ -f ${datafile}.json ]; then
mv ${datafile}.json log
# Move any remaining files into the log subdirectory.
for outfile in *; do
if [ -f $outfile ]; then
# Do validation (if requested).
if [ $VALIDATE_IN_JOB -eq 1 ]; then
# If SAM was used, get the parent files based on the cpid.
if [ $USE_SAM -ne 0 ]; then
id=`cat log/cpid.txt`
parent_files=($(ifdh translateConstraints "consumer_process_id=$id and consumed_status consumed"))
if [ $stat -ne 0 ]; then
echo "Failed to determine parentage."
echo "The file's parents are: "
for elt in ${parent_files[*]};
echo "The file's aunts are: "
for elt in ${aunt_files[*]};
# If we are maintaining the output's parentage, combine the file's parents and aunts
# into flat strings. These strings will be interpreted by validate_in_job.py. If they
# are left empty, validate_in_job.py will not change the file's parentage.
if [ $MAINTAIN_PARENTAGE -eq 1 ]; then
export JOBS_PARENTS=`echo ${parent_files[*]}`
export JOBS_AUNTS=`echo ${aunt_files[*]}`
# Run the validation function for the whole job.
valstat=$overallStat
if [ $valstat -eq 0 ]; then
for ftype in ${DATAFILETYPES[*]}; do
dataopt="$dataopt --data_file_type $ftype"
echo "./validate_in_job.py --dir $curdir/out --logfiledir $curdir/log --outdir $OUTDIR/$OUTPUT_SUBDIR --declare $DECLARE_IN_JOB --copy $COPY_TO_FTS --maintain_parentage $MAINTAIN_PARENTAGE $dataopt"
./validate_in_job.py --dir $curdir/out --logfiledir $curdir/log --outdir $OUTDIR/$OUTPUT_SUBDIR --declare $DECLARE_IN_JOB --copy $COPY_TO_FTS --maintain_parentage $MAINTAIN_PARENTAGE $dataopt
# Make a tarball of the log directory contents, and save the tarball in the log directory.
tar -cjf log.tar -C log .
# For copy back, set up the current version of ifdhc.
# It may be different from the version set up by larsoft.
echo "Setting up current version of ifdhc."
if [ x$IFDHC_DIR != x ]; then
echo "IFDHC_DIR=$IFDHC_DIR"
# Create remote output and log directories.
export IFDH_CP_MAXRETRIES=5
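# ifdh mkdir does not appear to create parent directories, so the loops
# below walk down $OUTPUT_SUBDIR and create each missing level in turn.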
echo "Make directory ${LOGDIR}/${OUTPUT_SUBDIR}."
subdir=$OUTPUT_SUBDIR
while echo $subdir | grep -q /; do
dir=${dir}/${subdir%%/*}
echo "ifdh mkdir $IFDH_OPT $dir"
ifdh mkdir $IFDH_OPT $dir
echo "ifdh mkdir $IFDH_OPT ${LOGDIR}/$OUTPUT_SUBDIR"
ifdh mkdir $IFDH_OPT ${LOGDIR}/$OUTPUT_SUBDIR
echo "Done making directory ${LOGDIR}/${OUTPUT_SUBDIR}."
if [ ${OUTDIR} != ${LOGDIR} ]; then
echo "Make directory ${OUTDIR}/${OUTPUT_SUBDIR}."
subdir=$OUTPUT_SUBDIR
while echo $subdir | grep -q /; do
dir=${dir}/${subdir%%/*}
echo "ifdh mkdir $IFDH_OPT $dir"
ifdh mkdir $IFDH_OPT $dir
echo "ifdh mkdir $IFDH_OPT ${OUTDIR}/$OUTPUT_SUBDIR"
ifdh mkdir $IFDH_OPT ${OUTDIR}/$OUTPUT_SUBDIR
echo "Done making directory ${OUTDIR}/${OUTPUT_SUBDIR}."
# Transfer tarball in log subdirectory.
echo "ifdh cp -D $IFDH_OPT log/log.tar ${LOGDIR}/$OUTPUT_SUBDIR"
ifdh cp -D $IFDH_OPT log/log.tar ${LOGDIR}/$OUTPUT_SUBDIR
if [ $stat -ne 0 ]; then
echo "ifdh cp failed with status ${stat}."
# Transfer data files in out subdirectory.
if [ $COPY_TO_FTS -eq 0 ]; then
if [ "$( ls -A out )" ]; then
echo "ifdh cp -D $IFDH_OPT out/* ${OUTDIR}/$OUTPUT_SUBDIR"
ifdh cp -D $IFDH_OPT out/* ${OUTDIR}/$OUTPUT_SUBDIR
if [ $stat -ne 0 ]; then
echo "ifdh cp failed with status ${stat}."
if [ $statout -eq 0 -a -f log/lar.stat ]; then
statout=`cat log/lar.stat`
if [ $statout -eq 0 ]; then