Hello,
I ran my script that I created using afni_proc.py, and it keeps stopping with a "current memory malloc-ated" message followed by "Killed". Below is part of the output, and I have also attached my script.
Sungjin
3dDeconvolve -input pb05.265003.r01.scale+tlrc.HEAD pb05.265003.r02.scale+tlrc.HEAD pb05.265003.r03.scale+tlrc.HEAD pb05.265003.r04.scale+tlrc.HEAD pb05.265003.r05.scale+tlrc.HEAD pb05.265003.r06.scale+tlrc.HEAD -censor motion_265003_censor.1D -ortvec mot_demean.r01.1D mot_demean_r01 -ortvec mot_demean.r02.1D mot_demean_r02 -ortvec mot_demean.r03.1D mot_demean_r03 -ortvec mot_demean.r04.1D mot_demean_r04 -ortvec mot_demean.r05.1D mot_demean_r05 -ortvec mot_demean.r06.1D mot_demean_r06 -ortvec mot_deriv.r01.1D mot_deriv_r01 -ortvec mot_deriv.r02.1D mot_deriv_r02 -ortvec mot_deriv.r03.1D mot_deriv_r03 -ortvec mot_deriv.r04.1D mot_deriv_r04 -ortvec mot_deriv.r05.1D mot_deriv_r05 -ortvec mot_deriv.r06.1D mot_deriv_r06 -polort 3 -num_stimts 1 -stim_times 1 stimuli/Timing_CGE.txt GAM -stim_label 1 Timing_CGE.txt -jobs 4 -GOFORIT 5 -fout -tout -x1D X.xmat.1D -xjpeg X.jpg -x1D_uncensored X.nocensor.xmat.1D -errts errts.265003 -bucket stats.265003
++ 3dDeconvolve extending num_stimts from 1 to 73 due to -ortvec
++ 3dDeconvolve: AFNI version=AFNI_19.0.26 (Mar 20 2019) [64-bit]
++ Authored by: B. Douglas Ward, et al.
++ current memory malloc-ated = 1,521,978 bytes (about 1.5 million)
++ loading dataset pb05.265003.r01.scale+tlrc.HEAD pb05.265003.r02.scale+tlrc.HEAD pb05.265003.r03.scale+tlrc.HEAD pb05.265003.r04.scale+tlrc.HEAD pb05.265003.r05.scale+tlrc.HEAD pb05.265003.r06.scale+tlrc.HEAD
Killed
#!/bin/tcsh -xef

echo "auto-generated by afni_proc.py, Tue Oct 15 15:34:29 2019"
echo "(version 6.32, February 22, 2019)"
echo "execution started: `date`"

# to execute via tcsh:
#   tcsh -xef proc.265001 |& tee output.proc.265001
# to execute via bash:
#   tcsh -xef proc.265001.s1 2>&1 | tee output.proc.265001.s1
# =========================== auto block: setup ============================
# script setup

# take note of the AFNI version
afni -ver

# check that the current AFNI version is recent enough
afni_history -check_date 17 Jan 2019
if ( $status ) then
    echo "** this script requires newer AFNI binaries (than 17 Jan 2019)"
    echo "   (consider: @update.afni.binaries -defaults)"
    exit
endif

# the user may specify a single subject to run with
if ( $#argv > 0 ) then
    set subj = $argv[1]
else
    set subj = 265001
endif

# session number, used to build input paths and the output dir name
set sNUM = 1

# assign output directory name
set output_dir = ${subj}.s$sNUM.results

# verify that the results directory does not yet exist
# (note: report $output_dir itself, so the message matches the tested dir)
if ( -d $output_dir ) then
    echo output dir "$output_dir" already exists
    exit
endif
# set list of runs (zero-padded: 01..06)
set runs = (`count -digits 2 1 6`)

# create results and stimuli directories
mkdir $output_dir
mkdir $output_dir/stimuli

# copy stim files into stimulus directory
cp /home/sungjin/fMRI/CGE/CGE_Raw_Data/Timing_CGE.txt $output_dir/stimuli

# copy anatomy to results dir
3dcopy CGE_Raw_Data/Session1/${subj}_S$sNUM/subject_raw/anatSS.SSW.nii \
    $output_dir/anatSS.SSW

# copy external -tlrc_NL_warped_dsets datasets
3dcopy CGE_Raw_Data/Session$sNUM/${subj}_S$sNUM/subject_raw/anatQQ.SSW.nii \
    $output_dir/anatQQ.SSW
3dcopy CGE_Raw_Data/Session$sNUM/${subj}_S$sNUM/subject_raw/anatQQ.SSW.aff12.1D \
    $output_dir/anatQQ.SSW.aff12.1D
3dcopy CGE_Raw_Data/Session$sNUM/${subj}_S$sNUM/subject_raw/anatQQ.SSW_WARP.nii \
    $output_dir/anatQQ.SSW_WARP.nii
# ============================ auto block: tcat ============================
# apply 3dTcat to copy input dsets to results dir,
# while removing the first 0 TRs
3dTcat -prefix $output_dir/pb00.$subj.r01.tcat \
    CGE_Raw_Data/Session1/${subj}_S$sNUM/subject_raw/deob.rest1.s1+orig'[0..$]'
3dTcat -prefix $output_dir/pb00.$subj.r02.tcat \
    CGE_Raw_Data/Session1/${subj}_S$sNUM/subject_raw/deob.pv.s1+orig'[0..$]'
3dTcat -prefix $output_dir/pb00.$subj.r03.tcat \
    CGE_Raw_Data/Session1/${subj}_S$sNUM/subject_raw/deob.14p.s1+orig'[0..$]'
3dTcat -prefix $output_dir/pb00.$subj.r04.tcat \
    CGE_Raw_Data/Session1/${subj}_S$sNUM/subject_raw/deob.6p.s1+orig'[0..$]'
3dTcat -prefix $output_dir/pb00.$subj.r05.tcat \
    CGE_Raw_Data/Session1/${subj}_S$sNUM/subject_raw/deob.flk.s1+orig'[0..$]'
3dTcat -prefix $output_dir/pb00.$subj.r06.tcat \
    CGE_Raw_Data/Session1/${subj}_S$sNUM/subject_raw/deob.rest2.s1+orig'[0..$]'

# and make note of repetitions (TRs) per run
set tr_counts = ( 180 150 150 150 150 180 )
# -------------------------------------------------------
# enter the results directory (can begin processing data)
cd $output_dir

# ========================== auto block: outcount ==========================
# data check: compute outlier fraction for each volume
touch out.pre_ss_warn.txt
foreach run ( $runs )
    3dToutcount -automask -fraction -polort 3 -legendre \
                pb00.$subj.r$run.tcat+orig > outcount.r$run.1D

    # outliers at TR 0 might suggest pre-steady state TRs
    if ( `1deval -a outcount.r$run.1D"{0}" -expr "step(a-0.4)"` ) then
        echo "** TR #0 outliers: possible pre-steady state TRs in run $run" \
            >> out.pre_ss_warn.txt
    endif
end

# catenate outlier counts into a single time series
cat outcount.r*.1D > outcount_rall.1D

# get run number and TR index for minimum outlier volume
# (trailing \' transposes the 1D file so 3dTstat sees it as a time series)
set minindex = `3dTstat -argmin -prefix - outcount_rall.1D\'`
set ovals = ( `1d_tool.py -set_run_lengths $tr_counts \
                          -index_to_run_tr $minindex` )

# save run and TR indices for extraction of vr_base_min_outlier
set minoutrun = $ovals[1]
set minouttr  = $ovals[2]
echo "min outlier: run $minoutrun, TR $minouttr" | tee out.min_outlier.txt
# ================================= tshift =================================
# time shift data so all slice timing is the same
foreach run ( $runs )
    3dTshift -tzero 0 -quintic -prefix pb01.$subj.r$run.tshift \
             pb00.$subj.r$run.tcat+orig
end

# ================================ despike =================================
# apply 3dDespike to each run
foreach run ( $runs )
    3dDespike -NEW -nomask -prefix pb02.$subj.r$run.despike \
              pb01.$subj.r$run.tshift+orig
end
# --------------------------------
# extract volreg registration base (min-outlier volume chosen above)
3dbucket -prefix vr_base_min_outlier \
    pb02.$subj.r$minoutrun.despike+orig"[$minouttr]"

# ================================= align ==================================
# for e2a: compute anat alignment transformation to EPI registration base
# (new anat will be current anatSS.SSW+orig)
align_epi_anat.py -anat2epi -anat anatSS.SSW+orig \
       -suffix _al_junk                           \
       -epi vr_base_min_outlier+orig -epi_base 0  \
       -epi_strip 3dAutomask                      \
       -anat_has_skull no                         \
       -cost lpc+ZZ                               \
       -volreg off -tshift off

# ================================== tlrc ==================================
# nothing to do: have external -tlrc_NL_warped_dsets
# warped anat     : anatQQ.SSW+tlrc
# affine xform    : anatQQ.SSW.aff12.1D
# non-linear warp : anatQQ.SSW_WARP.nii
# ================================= volreg =================================
# align each dset to base volume, to anat, warp to tlrc space

# verify that we have a +tlrc warp dataset
if ( ! -f anatQQ.SSW+tlrc.HEAD ) then
    echo "** missing +tlrc warp dataset: anatQQ.SSW+tlrc.HEAD"
    exit
endif

# register and warp
foreach run ( $runs )
    # register each volume to the base image
    3dvolreg -verbose -zpad 1 -base vr_base_min_outlier+orig \
             -1Dfile dfile.r$run.1D -prefix rm.epi.volreg.r$run \
             -cubic \
             -1Dmatrix_save mat.r$run.vr.aff12.1D \
             pb02.$subj.r$run.despike+orig

    # create an all-1 dataset to mask the extents of the warp
    3dcalc -overwrite -a pb02.$subj.r$run.despike+orig -expr 1 \
           -prefix rm.epi.all1

    # catenate volreg/epi2anat/tlrc xforms
    cat_matvec -ONELINE \
               anatQQ.SSW.aff12.1D \
               anatSS.SSW_al_junk_mat.aff12.1D -I \
               mat.r$run.vr.aff12.1D > mat.r$run.warp.aff12.1D

    # apply catenated xform: volreg/epi2anat/tlrc/NLtlrc
    # then apply non-linear standard-space warp
    3dNwarpApply -master anatQQ.SSW+tlrc -dxyz 1 \
                 -source pb02.$subj.r$run.despike+orig \
                 -nwarp "anatQQ.SSW_WARP.nii mat.r$run.warp.aff12.1D" \
                 -prefix rm.epi.nomask.r$run

    # warp the all-1 dataset for extents masking
    3dNwarpApply -master anatQQ.SSW+tlrc -dxyz 1 \
                 -source rm.epi.all1+orig \
                 -nwarp "anatQQ.SSW_WARP.nii mat.r$run.warp.aff12.1D" \
                 -interp cubic \
                 -ainterp NN -quiet \
                 -prefix rm.epi.1.r$run

    # make an extents intersection mask of this run
    3dTstat -min -prefix rm.epi.min.r$run rm.epi.1.r$run+tlrc
end

# make a single file of registration params
cat dfile.r*.1D > dfile_rall.1D

# ----------------------------------------
# create the extents mask: mask_epi_extents+tlrc
# (this is a mask of voxels that have valid data at every TR)
3dMean -datum short -prefix rm.epi.mean rm.epi.min.r*.HEAD
3dcalc -a rm.epi.mean+tlrc -expr 'step(a-0.999)' -prefix mask_epi_extents

# and apply the extents mask to the EPI data
# (delete any time series with missing data)
foreach run ( $runs )
    3dcalc -a rm.epi.nomask.r$run+tlrc -b mask_epi_extents+tlrc \
           -expr 'a*b' -prefix pb03.$subj.r$run.volreg
end

# warp the volreg base EPI dataset to make a final version
cat_matvec -ONELINE \
           anatQQ.SSW.aff12.1D \
           anatSS.SSW_al_junk_mat.aff12.1D -I > mat.basewarp.aff12.1D

3dNwarpApply -master anatQQ.SSW+tlrc -dxyz 1 \
             -source vr_base_min_outlier+orig \
             -nwarp "anatQQ.SSW_WARP.nii mat.basewarp.aff12.1D" \
             -prefix final_epi_vr_base_min_outlier

# create an anat_final dataset, aligned with stats
3dcopy anatQQ.SSW+tlrc anat_final.$subj

# record final registration costs
3dAllineate -base final_epi_vr_base_min_outlier+tlrc -allcostX \
            -input anat_final.$subj+tlrc |& tee out.allcostX.txt
# ================================== blur ==================================
# blur each volume of each run
foreach run ( $runs )
    3dmerge -1blur_fwhm 6.0 -doall -prefix pb04.$subj.r$run.blur \
            pb03.$subj.r$run.volreg+tlrc
end

# ================================== mask ==================================
# create 'full_mask' dataset (union mask)
foreach run ( $runs )
    3dAutomask -prefix rm.mask_r$run pb04.$subj.r$run.blur+tlrc
end

# create union of inputs, output type is byte
3dmask_tool -inputs rm.mask_r*+tlrc.HEAD -union -prefix full_mask.$subj

# ---- create subject anatomy mask, mask_anat.$subj+tlrc ----
#      (resampled from tlrc anat)
3dresample -master full_mask.$subj+tlrc -input anatQQ.SSW+tlrc \
           -prefix rm.resam.anat

# convert to binary anat mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.anat+tlrc \
            -prefix mask_anat.$subj

# compute tighter EPI mask by intersecting with anat mask
3dmask_tool -input full_mask.$subj+tlrc mask_anat.$subj+tlrc \
            -inter -prefix mask_epi_anat.$subj

# compute overlaps between anat and EPI masks
3dABoverlap -no_automask full_mask.$subj+tlrc mask_anat.$subj+tlrc \
            |& tee out.mask_ae_overlap.txt

# note Dice coefficient of masks, as well
3ddot -dodice full_mask.$subj+tlrc mask_anat.$subj+tlrc \
      |& tee out.mask_ae_dice.txt

# ---- create group anatomy mask, mask_group+tlrc ----
#      (resampled from tlrc base anat, MNI152_2009_template_SSW.nii.gz)
3dresample -master full_mask.$subj+tlrc -prefix ./rm.resam.group \
           -input /home/sungjin/abin/MNI152_2009_template_SSW.nii.gz

# convert to binary group mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.group+tlrc \
            -prefix mask_group
# ================================= scale ==================================
# scale each voxel time series to have a mean of 100
# (be sure no negatives creep in)
# (subject to a range of [0,200])
foreach run ( $runs )
    3dTstat -prefix rm.mean_r$run pb04.$subj.r$run.blur+tlrc
    3dcalc -a pb04.$subj.r$run.blur+tlrc -b rm.mean_r$run+tlrc \
           -c mask_epi_extents+tlrc \
           -expr 'c * min(200, a/b*100)*step(a)*step(b)' \
           -prefix pb05.$subj.r$run.scale
end
# ================================ regress =================================

# compute de-meaned motion parameters (for use in regression)
1d_tool.py -infile dfile_rall.1D -set_run_lengths 180 150 150 150 150 180 \
           -demean -write motion_demean.1D

# compute motion parameter derivatives (for use in regression)
1d_tool.py -infile dfile_rall.1D -set_run_lengths 180 150 150 150 150 180 \
           -derivative -demean -write motion_deriv.1D

# convert motion parameters for per-run regression
1d_tool.py -infile motion_demean.1D -set_run_lengths 180 150 150 150 150 180 \
           -split_into_pad_runs mot_demean
1d_tool.py -infile motion_deriv.1D -set_run_lengths 180 150 150 150 150 180 \
           -split_into_pad_runs mot_deriv

# create censor file motion_${subj}_censor.1D, for censoring motion
1d_tool.py -infile dfile_rall.1D -set_run_lengths 180 150 150 150 150 180 \
           -show_censor_count -censor_prev_TR \
           -censor_motion 0.5 motion_${subj}

# note TRs that were not censored
set ktrs = `1d_tool.py -infile motion_${subj}_censor.1D \
                       -show_trs_uncensored encoded`
# ------------------------------
# run the regression analysis
3dDeconvolve -input pb05.$subj.r*.scale+tlrc.HEAD \
    -censor motion_${subj}_censor.1D \
    -ortvec mot_demean.r01.1D mot_demean_r01 \
    -ortvec mot_demean.r02.1D mot_demean_r02 \
    -ortvec mot_demean.r03.1D mot_demean_r03 \
    -ortvec mot_demean.r04.1D mot_demean_r04 \
    -ortvec mot_demean.r05.1D mot_demean_r05 \
    -ortvec mot_demean.r06.1D mot_demean_r06 \
    -ortvec mot_deriv.r01.1D mot_deriv_r01 \
    -ortvec mot_deriv.r02.1D mot_deriv_r02 \
    -ortvec mot_deriv.r03.1D mot_deriv_r03 \
    -ortvec mot_deriv.r04.1D mot_deriv_r04 \
    -ortvec mot_deriv.r05.1D mot_deriv_r05 \
    -ortvec mot_deriv.r06.1D mot_deriv_r06 \
    -polort 3 \
    -num_stimts 1 \
    -stim_times 1 stimuli/Timing_CGE.txt 'GAM' \
    -stim_label 1 Timing_CGE.txt \
    -jobs 4 \
    -GOFORIT 5 \
    -fout -tout -x1D X.xmat.1D -xjpeg X.jpg \
    -x1D_uncensored X.nocensor.xmat.1D \
    -errts errts.${subj} \
    -bucket stats.$subj

# if 3dDeconvolve fails, terminate the script
if ( $status != 0 ) then
    echo '---------------------------------------'
    echo '** 3dDeconvolve error, failing...'
    echo '   (consider the file 3dDeconvolve.err)'
    exit
endif

# display any large pairwise correlations from the X-matrix
1d_tool.py -show_cormat_warnings -infile X.xmat.1D |& tee out.cormat_warn.txt

# display degrees of freedom info from X-matrix
1d_tool.py -show_df_info -infile X.xmat.1D |& tee out.df_info.txt
# -- execute the 3dREMLfit script, written by 3dDeconvolve --
tcsh -x stats.REML_cmd

# if 3dREMLfit fails, terminate the script
if ( $status != 0 ) then
    echo '---------------------------------------'
    echo '** 3dREMLfit error, failing...'
    exit
endif

# create an all_runs dataset to match the fitts, errts, etc.
3dTcat -prefix all_runs.$subj pb05.$subj.r*.scale+tlrc.HEAD

# --------------------------------------------------
# create a temporal signal to noise ratio dataset
#    signal: if 'scale' block, mean should be 100
#    noise : compute standard deviation of errts
3dTstat -mean -prefix rm.signal.all all_runs.$subj+tlrc"[$ktrs]"
3dTstat -stdev -prefix rm.noise.all errts.${subj}_REML+tlrc"[$ktrs]"
3dcalc -a rm.signal.all+tlrc \
       -b rm.noise.all+tlrc \
       -c full_mask.$subj+tlrc \
       -expr 'c*a/b' -prefix TSNR.$subj

# ---------------------------------------------------
# compute and store GCOR (global correlation average)
# (sum of squares of global mean of unit errts)
3dTnorm -norm2 -prefix rm.errts.unit errts.${subj}_REML+tlrc
3dmaskave -quiet -mask full_mask.$subj+tlrc rm.errts.unit+tlrc \
          > gmean.errts.unit.1D
3dTstat -sos -prefix - gmean.errts.unit.1D\' > out.gcor.1D
echo "-- GCOR = `cat out.gcor.1D`"

# ---------------------------------------------------
# compute correlation volume
# (per voxel: average correlation across masked brain)
# (now just dot product with average unit time series)
3dcalc -a rm.errts.unit+tlrc -b gmean.errts.unit.1D -expr 'a*b' -prefix rm.DP
3dTstat -sum -prefix corr_brain rm.DP+tlrc

# create fitts dataset from all_runs and errts
3dcalc -a all_runs.$subj+tlrc -b errts.${subj}+tlrc -expr 'a-b' \
       -prefix fitts.$subj

# create fitts from REML errts
# (note: ${subj}_REML must be braced, else tcsh reads it as var $subj_REML)
3dcalc -a all_runs.$subj+tlrc -b errts.${subj}_REML+tlrc -expr 'a-b' \
       -prefix fitts.${subj}_REML

# create ideal files for fixed response stim types
1dcat X.nocensor.xmat.1D'[24]' > ideal_Timing_CGE.txt.1D

# --------------------------------------------------------
# compute sum of non-baseline regressors from the X-matrix
# (use 1d_tool.py to get list of regressor columns)
set reg_cols = `1d_tool.py -infile X.nocensor.xmat.1D -show_indices_interest`
3dTstat -sum -prefix sum_ideal.1D X.nocensor.xmat.1D"[$reg_cols]"

# also, create a stimulus-only X-matrix, for easy review
1dcat X.nocensor.xmat.1D"[$reg_cols]" > X.stim.xmat.1D
# ============================ blur estimation =============================
# compute blur estimates
touch blur_est.$subj.1D   # start with empty file

# create directory for ACF curve files
mkdir files_ACF

# -- estimate blur for each run in epits --
touch blur.epits.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask full_mask.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.epits.r$run.1D \
            all_runs.$subj+tlrc"[$trs]" >> blur.epits.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{0..$(2)}'\'` )
echo average epits FWHM blurs: $blurs
echo "$blurs   # epits FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{1..$(2)}'\'` )
echo average epits ACF blurs: $blurs
echo "$blurs   # epits ACF blur estimates" >> blur_est.$subj.1D

# -- estimate blur for each run in errts --
touch blur.errts.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask full_mask.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.errts.r$run.1D \
            errts.${subj}+tlrc"[$trs]" >> blur.errts.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{0..$(2)}'\'` )
echo average errts FWHM blurs: $blurs
echo "$blurs   # errts FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{1..$(2)}'\'` )
echo average errts ACF blurs: $blurs
echo "$blurs   # errts ACF blur estimates" >> blur_est.$subj.1D

# -- estimate blur for each run in err_reml --
touch blur.err_reml.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask full_mask.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.err_reml.r$run.1D \
            errts.${subj}_REML+tlrc"[$trs]" >> blur.err_reml.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.err_reml.1D'{0..$(2)}'\'` )
echo average err_reml FWHM blurs: $blurs
echo "$blurs   # err_reml FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.err_reml.1D'{1..$(2)}'\'` )
echo average err_reml ACF blurs: $blurs
echo "$blurs   # err_reml ACF blur estimates" >> blur_est.$subj.1D
# add 3dClustSim results as attributes to any stats dset
mkdir files_ClustSim

# run Monte Carlo simulations using method 'ACF'
set params = ( `grep ACF blur_est.$subj.1D | tail -n 1` )
3dClustSim -both -mask full_mask.$subj+tlrc -acf $params[1-3] \
           -cmd 3dClustSim.ACF.cmd -prefix files_ClustSim/ClustSim.ACF

# run 3drefit to attach 3dClustSim results to stats
set cmd = ( `cat 3dClustSim.ACF.cmd` )
$cmd stats.$subj+tlrc stats.${subj}_REML+tlrc
# ================== auto block: generate review scripts ===================

# generate a review script for the unprocessed EPI data
gen_epi_review.py -script @epi_review.$subj \
    -dsets pb00.$subj.r*.tcat+orig.HEAD

# generate scripts to review single subject results
# (try with defaults, but do not allow bad exit status)
gen_ss_review_scripts.py -mot_limit 0.5 -exit0 \
    -ss_review_dset out.ss_review.$subj.txt \
    -write_uvars_json out.ss_review_uvars.json

# ========================== auto block: finalize ==========================

# remove temporary files
\rm -f rm.*

# if the basic subject review script is here, run it
# (want this to be the last text output)
if ( -e @ss_review_basic ) then
    ./@ss_review_basic |& tee out.ss_review.$subj.txt

    # generate html ss review pages
    # (akin to static images from running @ss_review_driver)
    apqc_make_tcsh.py -review_style basic -subj_dir . \
        -uvar_json out.ss_review_uvars.json
    tcsh @ss_review_html |& tee out.review_html
    apqc_make_html.py -qc_dir QC_$subj

    echo "\nconsider running: \n\n    afni_open -b $subj.results/QC_$subj/index.html\n"
endif

# return to parent directory (just in case...)
cd ..

echo "execution finished: `date`"