This is part of the Zsh Utilities.
This file contains functions and aliases related to the “work” topic.
pkgtools is used as a wrapper for installation: it is a bash program that wraps cmake or plain Makefiles and is used intensively with the SuperNEMO software. A short usage sketch of the helper functions defined below follows their definitions.
alias pkg-notify='notify -t 2000 -i stock_dialog-info "pkgtools"'
function pkgc ()
{
./pkgtools.d/pkgtool configure $@ && pkg-notify "Configure done"
}
function pkgb ()
{
./pkgtools.d/pkgtool build && pkg-notify "Build done"
}
function pkgt ()
{
./pkgtools.d/pkgtool test && pkg-notify "Running test programs done"
}
function pkgr ()
{
./pkgtools.d/pkgtool reset && pkg-notify "Reset done"
}
function pkgi ()
{
./pkgtools.d/pkgtool install && pkg-notify "Install done"
}
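# A minimal usage sketch of the pkgtools helpers above, assuming the current
# directory is a package checkout that ships a ./pkgtools.d/pkgtool script;
# the extra configure option shown is purely hypothetical.
#   pkgc --with-doc    # configure (extra arguments are passed through)
#   pkgb && pkgt       # build, then run the test programs
#   pkgi               # install
#   pkgr               # reset the build tree to start over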
alias root='root -l'
function root/newBrowser () {
local file_list=
while [ -n "$1" ]; do
file="$1"
if [ ! -f $file ]; then
pkgtools::msg_error "Could not find file $file"
else
file_list+="$file "
fi
shift 1
done
root -l $(echo ${file_list}) ~/.config/root/macros/newBrowser.C
return 0
}
alias rb="root/newBrowser"
function root/mergeTree () {
# Placeholder: tree merging is not implemented yet.
}
compdef _bxgenbb_inspector bxgenbb_inspector
function _bxgenbb_inspector ()
{
typeset -A opt_args
local context state line curcontext=""
_arguments \
{-h,--help}'[produce help message ]' \
{-d,--debug}'[produce debug logging ]' \
{-l,--load-dll}'[load a dynamic library ]' \
--dll-config'[load a configuration file for dynamic library loading ]' \
{-I,--interactive}'[run in interactive mode (not implemented) ]' \
{-n,--number-of-events}'[set the number of generated events. Example : --number-of-events 10000 ]' \
{-x,--trace-index}'[set the trace index. Example : --trace-index 10000 ]' \
{-u,--prng-trunc}'[set the trunc index of the random number generator (PRNG). Example : --prng-trunc 7 ]' \
{-s,--prng-seed}'[set the seed of the random number generator (PRNG). Example : --prng-seed 314159 ]' \
{-K,--prng-tracker}'[set the PRNG tracker file. Example : --prng-tracker genbb_inspector_prng.trk ]' \
{-c,--configuration}'[set the genbb manager configuration file. Example : --configuration genbb.conf ]' \
{-a,--action}'[set the action. Examples : --action list --action shoot ]' \
{-g,--generator}'[set the particle generator. Example : --generator Bi207 ]' \
{-H,--histo-def}'[set the name of an histogram definition input filename. ]' \
{-o,--output-file}'[set the name of an output filename. ]' \
{-t,--prompt-time-limit}'[set the limit on prompt time in ns. ]' \
{-P,--prompt}'[analyze prompt particles ]' \
{-D,--delayed}'[analyze delayed particles ]' \
{-T,--title-prefix}'[set a title prefix for exported histograms ]' \
{-S,--name-suffix}'[set a name suffix for exported histograms ]' \
'*: :->args' \
&& ret=0
case $state in
args)
if [[ CURRENT -eq NORMARG && ${+opt_args[--match]} -eq 0 ]]
then
# If the current argument is the first non-option argument
# and --match isn't present then a pattern is expected
_message -e patterns 'pattern' && ret=0
else
_files -/
fi
;;
esac
return ret
}
function --flvisualize ()
{
if (( $+commands[colout] )); then
$(pkgtools::get_binary_path flvisualize) $@ 2>&1 | colout -t mylogging -T ${zsh_utilities_dir}
else
$(pkgtools::get_binary_path flvisualize) $@
fi
}
function --bxdpp_processing ()
{
mkdir -p /tmp/garrido/snemo.d
if (( $+commands[colout] )); then
TIMEFMT="$terminfo[bold]$fg[blue][notice]:${reset_color}$fg[blue] %U user %S system %P cpu %*E total";\
time $(pkgtools::get_binary_path bxdpp_processing) $@ 2>&1 | \
colout -t mylogging -T ${zsh_utilities_dir} | \
colout "([0-9]+) (pro.*:)(.*\))(.*\))" blue,blue,green,red bold,normal,bold,bold
else
time $(pkgtools::get_binary_path bxdpp_processing) $@
fi
}
function --bxocd_manual ()
{
if (( $+commands[colout] )); then
$(pkgtools::get_binary_path bxocd_manual) $@ | colout -t mylogging -T ${zsh_utilities_dir} | colout -s rst
else
$(pkgtools::get_binary_path bxocd_manual) $@
fi
}
function mydpp_processing ()
{
pkgtools::at_function_enter mydpp_processing
if ! $(pkgtools::has_binary bxdpp_processing); then
pkgtools::msg_error "SN@ilWare has not been set"
pkgtools::at_function_exit
return 1
fi
local opt=
while [ -n "$1" ]; do
token="$1"
case "$token" in
--*-generator)
shift 1
local parameter=${token:2}
opt+="--variant-set=simulation:${parameter//-/_}_name=$1 "
;;
--source-material)
shift 1
opt+="--variant-set=detector:bb_source_material=snemo::$1 "
;;
--Bz-magnitude)
shift 1
opt+="--variant-set=detector:Bz_magnitude=$1 "
;;
--magnetic-field-type)
shift 1
opt+="--variant-set=detector:magnetic_field_type=$1 "
;;
--*-energy-resolution | --*-*-energy-threshold)
shift 1
local parameter=${token:2}
opt+="--variant-set=detector:${parameter//-/_}=$1 "
;;
--random-seeds)
opt+="--variant-set=simulation:random_seed_flag=true "
;;
--spg-*)
shift 1
local parameter=${token:2}
opt+="--variant-set=simulation:${parameter//-/_}=$1 "
;;
--output-path)
shift 1
if [ ! -d $1 ]; then
mkdir -p $1
fi
opt+="--variant-set=core:output_path=$1 "
;;
--dev-logging)
shift 1
opt+="--variant-set=core:logging_priority=$1 "
;;
*)
opt+="$token "
;;
esac
shift 1
done
_dump () {
local prefix="[notice]:"
declare -A db
local variants=( $(echo ${opt} | tr " " "\n" | grep 'variant-set') )
for i in ${variants}; do
local registry=$(echo $i | awk -F'[=:]' '{print $2}')
local parameter=$(echo $i | awk -F'[=:]' '{print $3}')
local value=$(echo $i | awk -F'[=:]' '{print $4}')
db[$registry]+="$prefix ↳ Parameter '$parameter': $value\n"
done
echo
if [ ! -n "${variants}" ]; then
echo "$prefix Variants set to their default values !"
else
echo "$prefix Variants dump:"
fi
for k in "${(@k)db}"; do
echo "$prefix Registry '$k'"
for i in ${db[$k]}; do
echo "$i"
done
done
}
local _config=$SNAILWARE_SIMULATION_DIR/snemo_simulation_configuration/current
local _flressource=$(flquery --resourcedir | tr -d '\n' | sed 's/\(.*Falaise-.*\)-\(.*\)-\(.*\)\/\(.*\)/\1\/\4/')
local _bxdll=$(bxquery --libdir | tr -d '\n')
local _fldll=$(flquery --libdir | tr -d '\n')
--bxdpp_processing \
--module-manager-config ${_config}/module_manager.conf \
--dlls-config ${_config}/dlls.conf \
--variant-config ${_config}/snvariant_manager.conf \
--datatools::resource-path=bxdll@${_bxdll} \
--datatools::resource-path=fldll@${_fldll} \
--datatools::resource-path=falaise@${_flressource} \
--datatools::resource-path=configuration@${_config} \
$(echo ${opt})
if (( $+commands[colout] )); then
_dump | colout -t mylogging -T ${zsh_utilities_dir}
fi
unset _config _flressource _bxdll _fldll
pkgtools::at_function_exit
return 0
}
compdef _mydpp_processing mydpp_processing
function _mydpp_processing ()
{
_config=$SNAILWARE_SIMULATION_DIR/snemo_simulation_configuration/current
_module_all () {
# cache the list of pipeline modules
if (( ! $+modulelist )); then
if [ ! -z "$_config" ]; then
for f in $(eval echo ${_config})/*.conf; do
line=$(cat $f | grep "\[.*type=.*_module.*\]")
name=$(echo $line | sed 's@\[name="\(.*\)".*type.*@\1@')
modulelist+=( $(echo $name) )
done
fi
fi
}
_arguments \
{-P,--logging}'[set logging priority]:logging:->log' \
{-l,--load-dll}'[set a DLL to be loaded]' \
{-L,--dlls-config}'[set the DLL loader configuration file]:file:_files -g \*.conf' \
{-%,--modulo}'[set the modulo print period for data record]:number' \
{-M,--max-records}'[set the maximum number of data records to be processed]:number' \
{-X,--no-max-records}'[Do not limit the maximum number of data records to be processed]:number' \
{-m,--module}'[add a module in the pipeline (optional)]:module:->module' \
{-c,--module-manager-config}'[set the module manager configuration file]:manager:_files -g \*.conf' \
{-i,--input-file}'[set an input file (optional)]:file:_files -g \*.{brio,xml,data.gz,txt}' \
{-o,--output-file}'[set the output file (optional)]:file:_files -g \*.{brio,xml,data.gz,txt}' \
{-O,--max-records-per-output-file}'[set the maximum number of data records per output file]:number' \
'--event-generator[set event generator]:eg:->eg' \
'--vertex-generator[set vertex generator]:vg:->vg' \
'--magnetic-field-type[set the magnetic field type]:mft:->mft' \
'--Bz-magnitude[set Bz magnetic field magnitude]' \
'--calo-energy-resolution[set main wall energy resolution]' \
'--xcalo-energy-resolution[set X-wall energy resolution]' \
'--gveto-energy-resolution[set gamma veto energy resolution]' \
'--calo-high-energy-threshold[set main wall high energy threshold]' \
'--xcalo-high-energy-threshold[set X-wall high energy threshold]' \
'--gveto-high-energy-threshold[set gamma veto high energy threshold]' \
'--calo-low-energy-threshold[set main wall low energy threshold]' \
'--xcalo-low-energy-threshold[set X-wall low energy threshold]' \
'--gveto-low-energy-threshold[set gamma veto low energy threshold]' \
'--spg-name[set particle name for single particle generator (spg)]:spgname:->spgname' \
'--spg-monokinetic-energy[set the monokinetic energy for single particle generator (spg)]' \
'--spg-flat-energy-min[set the minimal energy for single particle generator (spg)]' \
'--spg-flat-energy-max[set the maximal energy for single particle generator (spg)]' \
'--spg-gaussian-energy-mean[set the mean energy for single particle generator (spg)]' \
'--spg-gaussian-energy-sigma[set the sigma energy for single particle generator (spg)]' \
'--random-seeds[set all seeds to random values]' \
'--source-material[set the source foil material]:bbsm:->bbsm' \
'--output-path[set the output directory of generated files]:file:_files' \
'--dev-logging[set the development logging priority]:logging:->log' \
'--datatools\:\:variant-set=core\:[Set the values of a variant]:vs:->vs' \
'*: :->args' \
&& ret=0
case $state in
module)
_module_all
_describe -t modulelist 'module' modulelist && ret=0
;;
args)
_path_files -/ && ret=0
;;
log)
_logs=(
"fatal"
"critical"
"error"
"warning"
"notice"
"information"
"debug"
"trace"
)
_values 'Logging priority' $_logs && ret=0
;;
mft)
_mfts=(
"Bz_uniform"
"Bz_polynomial"
"B_mapped"
)
_values 'Magnetic field type' $_mfts && ret=0
;;
eg)
_egs=( $(sed -n '/event_generator_name/, /^\[/ { /string.enumerated_/ p }' \
${_config}/simulation_variants.def | awk -F \" '{print $2}') )
_values 'Event generator' $_egs && ret=0
;;
spgname)
_spgs=( $(sed -n '/spg_name/, /^\[/ { /string.enumerated_/ p }' \
${_config}/simulation_variants.def | awk -F \" '{print $2}') )
_values 'Single particle generator name' $_spgs && ret=0
;;
vg)
_vgs=( $(sed -n '/vertex_generator_name/, /^\[/ { /string.enumerated_/ p }' \
${_config}/simulation_variants.def | awk -F \" '{print $2}') )
_values 'Vertex generator' $_vgs && ret=0
;;
bbsm)
_bbsms=( $(sed -n '/bb_source_material/, /^\[/ { /string.enumerated_/ p }' \
${_config}/detector_variants.def | awk -F \" '{print substr($2,8)}' ) )
_values 'Source materials' $_bbsms && ret=0
;;
vs)
_vss=( $(sed -n '/parameters\./ { s/....$//p }' \
${_config}/*_variants.def | awk -F \" '{print $2}') )
_values 'Parameters' $_vss && ret=0
;;
esac
return ret
}
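# A hedged usage sketch of mydpp_processing: the option names come from the
# completion function above, but the generator names, source material and
# output path below are purely hypothetical placeholders.
#   mydpp_processing \
#     --event-generator Se82.0nubb \
#     --vertex-generator source_pads_bulk \
#     --source-material Se82 \
#     --random-seeds \
#     --output-path /tmp/snemo_test \
#     -M 1000 -o /tmp/snemo_test/output.brio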
function mydpp_analysis ()
{
pkgtools::at_function_enter mydpp_analysis
if $(pkgtools::has_binary bxdpp_processing); then
opt=
while [ -n "$1" ]; do
token="$1"
# if [ "${token}" = "--event-generator" -o "${token}" = "-e" ]; then
# shift 1
# opt+="--datatools::variant-set=simulation:event_generator_name=$1 "
# else
opt+="$token "
# fi
shift 1
done
_config=$SNAILWARE_SIMULATION_DIR/snemo_analysis_modules/config
--bxdpp_processing \
--module-manager-config ${_config}/module_manager.conf \
--dlls-config ${_config}/dlls.conf \
--datatools::resource-path=falaise@$(flquery --resourcedir | tr -d '\n') \
$(echo ${opt})
# --datatools::variant-config=${_config}/snvariant_manager.conf \
# --datatools::resource-path=snware@$SNAILWARE_PRO_DIR \
# --datatools::resource-path=configuration@${_config} \
else
pkgtools::msg_error "SN@ilWare has not been set"
pkgtools::at_function_exit
return 1
fi
pkgtools::at_function_exit
return 0
}
compdef _mydpp_analysis mydpp_analysis
function _mydpp_analysis ()
{
_config=$SNAILWARE_SIMULATION_DIR/snemo_analysis_modules/config
_module_all () {
# cache the list of analysis modules
if (( ! $+ana_modulelist )); then
if [ ! -z "$_config" ]; then
for f in $(eval echo ${_config})/*.conf; do
line=$(cat $f | grep "\[.*type=.*_module.*\]")
name=$(echo $line | sed 's@\[name="\(.*\)".*type.*@\1@')
ana_modulelist+=( $(echo $name) )
done
fi
fi
}
_arguments \
{-P,--logging-priority}'[set logging priority]:logging:->log' \
{-l,--load-dll}'[set a DLL to be loaded]' \
{-L,--dlls-config}'[set the DLL loader configuration file]:file:_files -g \*.conf' \
{-%,--modulo}'[set the modulo print period for data record]:number' \
{-M,--max-records}'[set the maximum number of data records to be processed]:number' \
{-X,--no-max-records}'[Do not limit the maximum number of data records to be processed]:number' \
{-m,--module}'[add a module in the pipeline (optional)]:module:->module' \
{-c,--module-manager-config}'[set the module manager configuration file]:manager:_files -g \*.conf' \
{-i,--input-file}'[set an input file (optional)]:file:_files -g \*.{brio,xml,data.gz,txt}' \
{-o,--output-file}'[set the output file (optional)]:file:_files -g \*.{brio,xml,data.gz,txt}' \
'*: :->args' \
&& ret=0
case $state in
log)
_logs=(
"fatal"
"critical"
"error"
"warning"
"notice"
"information"
"debug"
"trace"
)
_values 'Logging priority' $_logs && ret=0
;;
module)
_module_all
_describe -t ana_modulelist 'module' ana_modulelist && ret=0
;;
args)
_path_files -/ && ret=0
;;
esac
return ret
}
function myocd_manual ()
{
pkgtools::at_function_enter myocd_manual
if $(pkgtools::has_binary bxocd_manual); then
_lib_dir=$SNAILWARE_PRO_DIR/falaise/install/lib64
_libs=$(find ${_lib_dir} -type f -name "*.so")
libs=
for l in ${=_libs}; do
libs+=$(echo -n "--load-dll $(echo $l | sed 's/.*lib\(.*\)\.so/\1/')@$(dirname $l) ")
done
--bxocd_manual $(echo $libs) $@
else
pkgtools::msg_error "SN@ilWare has not been set"
pkgtools::at_function_exit
return 1
fi
pkgtools::at_function_exit
return 0
}
compdef _myocd_manual myocd_manual
function _myocd_manual ()
{
_ocd_all () {
# cache the list of OCD classes
if (( ! $+ocdlist )); then
ocdlist+=( $(myocd_manual --action list | tail -n +2 | sed 's/:/\\:/g') )
fi
}
_arguments \
{-P,--logging-priority}'[set logging priority]:logging:->log' \
{-l,--load-dll}'[set a DLL to be loaded]' \
{-L,--dlls-config}'[set the DLL loader configuration file]:file:_files -g \*.conf' \
{-c,--class-id}'[set the ID of the class to be investigated]:class:->class' \
{-a,--action}'[define the action to be performed]:action:->action' \
{-i,--input-file}'[set an input file (optional)]:file:_files' \
{-o,--output-file}'[set the output file (optional)]:file:_files' \
'*: :->args' \
&& ret=0
case $state in
log)
_logs=(
"fatal"
"critical"
"error"
"warning"
"notice"
"information"
"debug"
"trace"
)
_values 'Logging priority' $_logs && ret=0
;;
action)
_actions=(
"list"
"show"
"skeleton"
"validate"
)
_values 'Action list' $_actions && ret=0
;;
class)
_ocd_all
_describe -t ocdlist 'OCD' ocdlist && ret=0
;;
args)
#_path_files -/ && ret=0
;;
esac
return ret
}
function myvisualize ()
{
pkgtools::at_function_enter myvisualize
_config=$SNAILWARE_SIMULATION_DIR/snemo_simulation_configuration/current
--flvisualize $@
# --datatools::resource-path=configuration@${_config} \
# $@
pkgtools::at_function_exit
return 0
}
compdef _myvisualize myvisualize
function _myvisualize ()
{
local ret=1 state
typeset -A opt_args
_arguments \
{-h,--help}'[produce help message]' \
{-P,--logging-priority}'[set logging priority]:log:(fatal critical error warning notice information debug trace)' \
{-s,--scale}'[scale factor for computer screen (height/width)]:number' \
{-a,--auto-reading-delay}'[automatic event reading delay in seconds]:number' \
--detector-config-file'[set the path to the detector config file]:file:_files -g \*.conf' \
--style-config-file'[set the path to the style config file]:file:_files -g \*.conf' \
--cut-config-file'[set the path to the cut manager config file]:file:_files -g \*.conf' \
--preload'[preload Boost archives into memory (only works with plain simulated output from sng4)]' \
{-i,--input-file}'[set an input file(s)]:file:_files -g \*.{brio,xml,data.gz,txt}' \
{-l,--load-dll}'[set a DLL to be loaded]' \
--2d-display'[set position of 2D display frame]:position:(left right)' \
--full-2d-view'[add a new tab with top/front/side 2D view in one frame]' \
--focus-on-roi'[focus views on the region-of-interest]' \
--show-simulated-vertex'[show simulated vertex]:boolean:(true false)' \
--show-simulated-tracks'[show simulated tracks]:boolean:(true false)' \
--show-simulated-hits'[show simulated hits]:boolean:(true false)' \
--show-calibrated-hits'[show calibrated hits]:boolean:(true false)' \
--show-calibrated-info'[show calibrated info]:boolean:(true false)' \
--show-tracker-clustered-hits'[show tracker clustered hits]:boolean:(true false)' \
--show-tracker-trajectories'[show tracker trajectories]:boolean:(false true)' \
--show-particle-tracks'[show particle tracks]:boolean:(false true)' \
'*: :->args' \
&& ret=0
case $state in
args)
_files -g \*.conf
;;
esac
return ret
}
function set_brew ()
{
pkgtools::msg_notice "Setting brew installation"
pkgtools::add_path_to_PATH ~/Workdir/NEMO/supernemo/snware/brew/cadfaelbrew/bin
}
function unset_brew ()
{
pkgtools::msg_notice "Unsetting brew installation"
pkgtools::remove_path_to_PATH ~/Workdir/NEMO/supernemo/snware/brew/cadfaelbrew/bin
}
function cmb::build_parfile()
{
pkgtools::default_values
pkgtools::at_function_enter cmb::build_parfile
local engine
local likelihood
local spectrum
local output_parfile
local setup_file
local with_bao=false
local with_sn=false
local with_lensing=false
while [ -n "$1" ]; do
local token="$1"
if [ ${token[0,1]} = - ]; then
local opt=${token}
if [[ ${opt} = -h || ${opt} = --help ]]; then
echo "Usage:\n cmb::build_parfile [options]"
echo "Options:"
echo " -h [--help] print this help message"
echo " -d [--debug] debug mode"
echo " -D [--devel] devel mode"
echo " --engine set Boltzmann engine (mandatory)"
echo " --likelihood set likelihood type (mandatory)"
echo " --spectrum set data spectrum (mandatory)"
echo " --with-bao add BAO likelihood"
echo " --with-sn add SNIA likelihood"
echo " --with-lensing add lensing likelihood"
echo " --setup-file set path to the file holding parameter default values"
echo " --output-file set output parfile name"
return 0
elif [[ ${opt} = -d || ${opt} = --debug ]]; then
pkgtools::msg_using_debug
elif [[ ${opt} = -D || ${opt} = --devel ]]; then
pkgtools::msg_using_devel
elif [[ ${opt} = --output-file ]]; then
shift 1
output_parfile="$1"
elif [[ ${opt} = -e || ${opt} = --engine ]]; then
shift 1
engine="$1"
elif [[ ${opt} = -l || ${opt} = --likelihood ]]; then
shift 1
likelihood="$1"
elif [[ ${opt} = -s || ${opt} = --spectrum ]]; then
shift 1
spectrum="$1"
elif [[ ${opt} = --with-bao ]]; then
with_bao=true
elif [[ ${opt} = --with-sn ]]; then
with_sn=true
elif [[ ${opt} = --with-lensing ]]; then
with_lensing=true
elif [[ ${opt} = --setup-file ]]; then
shift 1
setup_file="$1"
fi
else
parfile="${token}"
fi
shift 1
done
if [ -z ${engine} ]; then
pkgtools::msg_error "Missing engine!"
pkgtools::at_function_exit
return 1
fi
if [ -z ${likelihood} ]; then
pkgtools::msg_error "Missing likelihood type!"
pkgtools::at_function_exit
return 1
fi
local parfile
local sep="###############################################################"
function add_header() {
parfile+="# -*- mode: conf-unix; -*-\n"
parfile+="${sep}\n"
parfile+="# Automagically generated $(date)\n"
parfile+="# from ${setup_file} with the following options:\n"
parfile+="# Boltzmann engine : ${engine}\n"
parfile+="# spectrum : ${spectrum}\n"
parfile+="# likelihood : ${likelihood}\n"
if ${with_bao} || ${with_sn} || ${with_lensing} ; then
parfile+="# add likelihoods : "
if ${with_bao}; then
parfile+="BAO "
fi
if ${with_sn}; then
parfile+="SN "
fi
if ${with_lensing}; then
parfile+="Lensing "
fi
parfile+="\n"
fi
parfile+="${sep}\n\n"
}
function add_title() {
local title="$1"
parfile+="${sep}\n"
parfile+="# ${title}\n"
}
local section
local subsection
local stream
local reset=true
function org::extract_section() {
if ${reset}; then stream=$(cat ${setup_file}); fi
function org::extract_section_level() {
grep "${section}" <<< ${stream} | grep -o "*" | grep -c .
}
local level=$(org::extract_section_level)
pkgtools::msg_devel "section=$section"
pkgtools::msg_devel "level=$level"
stream=$(sed -E -n '/^\*.{'$((level-1))'}.*'${section}'/,/^\*.{,'$((level-1))'} /p' <<< ${stream})
pkgtools::msg_devel "stream=${stream}"
}
function extract_params() {
pkgtools::msg_devel "section=$section"
pkgtools::msg_devel "subsection=$subsection"
org::extract_section
if [[ ! -z ${subsection} ]]; then
reset=false section=${subsection} org::extract_section
fi
parfile+=$(grep -v "^|-\|^*\|^#" <<< ${stream} | sed -e 's/|//g' -e 's/^[ \t]*//' -e '/^[A-Z]/d' -e '/^\s*$/d')
parfile+="\n\n"
section=
subsection=
}
local file_type
function extract_file() {
local file
if [[ ${file_type:l} = hillipop || ${file_type} = clikfile2 ]]; then
file=$(cat ${setup_file} | grep "${file_type:l}.*| *${spectrum} *|.*|" | awk -F'|' '{gsub(" ", "", $4); print $4}')
else
file=$(cat ${setup_file} | grep "${file_type} " | awk -F'|' '{gsub(" ", "", $4); print $4}')
fi
pkgtools::msg_devel "file type = ${file_type}"
pkgtools::msg_devel "file = ${file}"
parfile+="${file_type}=${file}\n\n"
}
# Write file
add_header
# Boltzmann engine
add_title "Boltzmann engine"
parfile+="engine=${engine}\n\n"
if [[ ${engine} = class ]]; then
section="CLASS setup" extract_params
parfile=$(sed -e 's#\(precisionFile\) *\(.*\)#\1=\2#g' <<< ${parfile})
parfile=$(sed -e 's#\(do_mPk\) *\(.*\)#\1=\2#g' <<< ${parfile})
elif [[ ${engine} = pico ]]; then
section="PICO setup" extract_params
fi
# Cosmological model
add_title "Cosmological model"
section="Cosmological model" extract_params
# Likelihoods
add_title "Likelihoods"
add_title "Planck likelihood for low-l"
file_type="clikfile" extract_file
add_title "Global parameter"
section="Likelihood parameters" subsection="Global parameter" extract_params
pkgtools::msg_devel "likelihood = ${likelihood:l}"
if [[ ${likelihood:l} = hillipop ]]; then
add_title "Hillipop likelihood"
file_type="${likelihood}" extract_file
section="Hillipop likelihood" subsection="General parameters" extract_params
if [[ ${likelihood} = HiLLiPOP ]]; then
section="Hillipop likelihood" subsection="Legacy" extract_params
elif [[ ${likelihood} = Hillipop ]]; then
section="Hillipop likelihood" subsection="source modeling" extract_params
fi
else
add_title "Planck likelihood for high-l"
file_type="clikfile2" extract_file
if [[ ${spectrum} = TT ]]; then
section="Planck likelihood" subsection="TT parameters" extract_params
elif [[ ${spectrum} = EE ]]; then
section="Planck likelihood" subsection="EE parameters" extract_params
elif [[ ${spectrum} = TE ]]; then
section="Planck likelihood" subsection="TE parameters" extract_params
elif [[ ${spectrum} = TE+EE ]]; then
section="Planck likelihood" subsection="TE parameters" extract_params
section="Planck likelihood" subsection="EE parameters" extract_params
elif [[ ${spectrum} = TT+EE ]]; then
section="Planck likelihood" subsection="TT parameters" extract_params
section="Planck likelihood" subsection="EE parameters" extract_params
elif [[ ${spectrum} = ALL ]]; then
section="Planck likelihood" subsection="TT parameters" extract_params
section="Planck likelihood" subsection="EE parameters" extract_params
section="Planck likelihood" subsection="TE parameters" extract_params
fi
fi
# Add. likelihoods
if ${with_bao}; then
add_title "BAO likelihood"
file_type="BAOFile" extract_file
file_type="BAO_3DFile" extract_file
fi
if ${with_sn}; then
add_title "SNIa likelihood"
file_type="JLA_SNIA_File" extract_file
section="SNIA likelihood" extract_params
fi
if ${with_lensing}; then
add_title "Lensing likelihood"
file_type="lensing_file" extract_file
fi
# MCMC priors
add_title "MCMC priors"
section="MCMC priors" subsection="Global prior" extract_params
if [[ ${likelihood:l} = hillipop ]]; then
section="Hillipop priors" subsection="General parameters" extract_params
if [[ ${likelihood} = Hillipop ]]; then
section="Hillipop priors" subsection="source modeling" extract_params
fi
elif [[ ${likelihood} = clik ]]; then
if [[ ${spectrum} = TT ]]; then
section="Planck priors" subsection="TT priors" extract_params
elif [[ ${spectrum} = EE ]]; then
section="Planck priors" subsection="EE priors" extract_params
elif [[ ${spectrum} = TE ]]; then
section="Planck priors" subsection="TE priors" extract_params
elif [[ ${spectrum} = TE+EE ]]; then
section="Planck priors" subsection="TE priors" extract_params
section="Planck priors" subsection="EE priors" extract_params
elif [[ ${spectrum} = TT+EE ]]; then
section="Planck priors" subsection="TT priors" extract_params
section="Planck priors" subsection="EE priors" extract_params
elif [[ ${spectrum} = ALL ]]; then
section="Planck priors" subsection="TT priors" extract_params
section="Planck priors" subsection="EE priors" extract_params
section="Planck priors" subsection="TE priors" extract_params
fi
fi
# Fitter & output options
add_title "Fitter & output options"
section="Fitter" extract_params
section="Output" extract_params
local params=("remove_cosmo_limits" "doHesse" "nitermax" "set_stra" "set_tol")
for p in ${params}; do
parfile=$(sed -e 's#\('${p}'\) *\(.*\)#\1=\2#g' <<< ${parfile})
done
# Post process
function post_process() {
local tmpfile=$(mktemp)
echo -e ${parfile} > ${tmpfile}
if [[ ${likelihood:l} = hillipop ]]; then
if [[ ${spectrum} = TE ]]; then
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/par +(Aps|Asz|Acib|AdustTT|AdustPP|Aksz|Aszxcib)/ {next}1')
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/gauss1 +(Aps|Asz|Acib|AdustTT|AdustPP|Aksz|Aszxcib)/ {next}1')
elif [[ ${spectrum} = TT ]]; then
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/par +(AdustTP|AdustPP)/ {next}1')
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/gauss1 +(AdustTP|AdustPP)/ {next}1')
elif [[ ${spectrum} = EE ]]; then
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/par +(Aps|Asz|Acib|AdustTT|AdustTP|Aksz|Aszxcib)/ {next}1')
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/gauss1 +(Aps|Asz|Acib|AdustTT|AdustTP|Aksz|Aszxcib)/ {next}1')
elif [[ ${spectrum} = TE+EE ]]; then
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/par +(Aps|Asz|Acib|AdustTT|Aksz|Aszxcib)/ {next}1')
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/gauss1 +(Aps|Asz|Acib|AdustTT|Aksz|Aszxcib)/ {next}1')
elif [[ ${spectrum} = TT+EE ]]; then
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/par +(Aps|Asz|Acib|AdustTP|Aksz|Aszxcib)/ {next}1')
echo -e ${parfile} > ${tmpfile}
parfile=$(cat ${tmpfile} | awk '/gauss1 +(Aps|Asz|Acib|AdustTP|Aksz|Aszxcib)/ {next}1')
fi
fi
# Remove duplicate lines
echo -e ${parfile} > ${tmpfile}
parfile=$(awk '{line=$0; gsub(" ", "", line); if (!NF || match($0, "###") || !seen[line]++) print $0}' ${tmpfile})
rm ${tmpfile}
}
post_process
if [[ -z ${output_parfile} ]]; then
echo -e $parfile
else
echo -e $parfile > ${output_parfile}
fi
pkgtools::at_function_exit
return 0
}
compdef _cmb::build_parfile cmb::build_parfile
function _cmb::build_parfile()
{
_arguments \
{-h,--help}'[print this help message]' \
{-d,--debug}'[debug mode]' \
{-D,--devel}'[devel mode]' \
'--with-bao[add BAO likelihood]' \
'--with-sn[add SNIA likelihood]' \
'--with-lensing[add lensing likelihood]' \
'--output-file[set output parfile name]:file:_files -g \*.par' \
{-s,--spectrum}'[set data spectrum (mandatory)]:spectrum:->spectrum' \
{-e,--engine}'[set Boltzmann engine (mandatory)]:engine:->engine' \
{-l,--likelihood}'[set likelihood type (mandatory)]:likelihood:->likelihood' \
'--setup-file[set the path to the file holding parameter default values]:file:_files -g \*.org' \
&& ret=0
case $state in
engine)
local -a _engines
_engines=(
pico:"Parameters for the Impatient Cosmologist"
class:"Cosmic Linear Anisotropy Solving System"
)
_describe -t _engines 'CMB engines' _engines && ret=0
;;
likelihood)
local -a _likes
_likes=(
clik:"Planck official likelihood"
Hillipop:"Hillipop likelihood with source modeling"
HiLLiPOP:"Default Hillipop likelihood"
)
_describe -t _likes 'CMB likelihoods' _likes && ret=0
;;
spectrum)
_specs=(
"TT"
"TE"
"EE"
"TE+EE"
"TT+EE"
"ALL"
)
_values 'Data spectrum' $_specs && ret=0
;;
esac
return ret
}
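# A hedged usage sketch of cmb::build_parfile: the engine, likelihood and
# spectrum values are taken from the completion above, while the setup file
# path and output name are hypothetical.
#   cmb::build_parfile \
#     --engine class \
#     --likelihood HiLLiPOP \
#     --spectrum TT \
#     --with-bao \
#     --setup-file ~/Workdir/CMB/setup.org \
#     --output-file lcdm_TT_hillipop.par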
function --cmb::updown()
{
if [[ ! -f .cmb_updown ]]; then
pkgtools::msg_error "Missing .cmb_updown file!"
return 1
fi
# Grab destination
local to=$(cat .cmb_updown)/.
local from=$PWD/.
pkgtools::msg_devel "To $to"
pkgtools::msg_devel "From $from"
if [[ $1 = down ]]; then
rsync-synchronize $to $from
elif [[ $1 = up ]]; then
rsync-synchronize $from $to
else
pkgtools::msg_error "Missing mode (either up or down)!"
return 1
fi
return 0
}
alias cmb::download='--cmb::updown down'
alias cmb::upload='--cmb::updown up'
alias cmb::updownload='--cmb::updown up && --cmb::updown down'
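# The .cmb_updown file holds the destination path used by rsync-synchronize.
# A hedged example (the remote location is hypothetical):
#   echo "cca:/sps/planck/camel/garrido/runs" > .cmb_updown
#   cmb::upload      # push the current directory to the destination
#   cmb::download    # pull the destination back into the current directory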
function cmb::link_mcmc()
{
for f in mcmc.?.?.txt; do
ln -sf $f ${f/.?.txt/.txt}
done
for f in mcmc*.updated.yaml; do
ln -sf $f mcmc.updated.yaml
break
done
}
function cmb::reorganize_mcmc()
{
local args=($@)
# Set defaults first so that --rm can imply --copy without being overridden
rm=false
copy=false
newpath=
if [[ ${args[(r)--rm]} ]]; then
rm=true
copy=true
fi
if [[ ${args[(r)--copy]} ]]; then
copy=true
fi
if [[ ${args[(r)--no-new-path]} ]]; then
newpath=.
fi
for f in $@; do
if [[ ${f:0:1} == "-" ]]; then
continue
fi
fn=$(basename $f)
if [[ -z ${newpath} ]]; then
newpath=$(echo $f | awk -F_ '{for (i=1;i<NF;i++) printf("%s_",$i)}')
newpath=${newpath:0:-1}
mkdir -p ${newpath}
fi
dirpath=$(dirname $newpath)
nbr=$(echo $f | awk -F_ '{print $NF}')
re='^[0-9]+$'
if ! [[ ${nbr} =~ ${re} ]] ; then
continue
fi
(
cd ${newpath}
if ${copy}; then
exts=(checkpoint covmat input.yaml)
for ext in ${exts}; do
cp ${dirpath}/$fn/mcmc.${ext} ./mcmc.${nbr}.${ext}
done
cp ${dirpath}/$fn/mcmc*.txt ./mcmc.${nbr}.txt
cp ${dirpath}/$fn/mcmc.updated.yaml .
pkls=(${dirpath}/$fn/*.pkl(N))
for g in $pkls; do
name=$(basename $g)
cp $g ${name/.pkl/.${nbr}.pkl}
done
logs=(${dirpath}/$fn/*.log(N))
for g in $logs; do
name=$(basename $g)
cp $g ${name/.log/.${nbr}.log}
done
else
ln -sf ${dirpath}/$fn/mcmc*.txt ./mcmc.${nbr}.txt
if [ ! -f ./mcmc.${nbr}.updated.yaml ]; then
ln -sf ${dirpath}/$fn/mcmc.updated.yaml ./mcmc.${nbr}.updated.yaml
fi
if [ ! -f ./mcmc.updated.yaml ]; then
ln -sf ${dirpath}/$fn/mcmc.updated.yaml ./mcmc.updated.yaml
fi
fi
)
if ${rm}; then
rm -rf $f
fi
pkgtools::msg_notice "Reorganization of directory $fn and others done"
done
}
if [[ $HOSTNAME = cc* ]]; then
# alias qjob_my_total='echo -ne "Total number of jobs: ";qstat | tail -n+3 | wc -l'
# alias qjob_my_run='echo -ne "Number of running jobs: ";qstat -s r | tail -n+3 | wc -l'
# alias qdel_all="qstat | tail -n +3 | grep -v QLOGIN | sort -u -k1,1 | awk '{print \"qdel\",\$1}' | sh"
pkgtools::reset_variable SCRATCH_DIR /sps/nemo/scratch/garrido
pkgtools::reset_variable WORKDIR ${SCRATCH_DIR}/workdir
pkgtools::reset_variable SINGULARITY_CACHEDIR $SCRATCH_DIR/.singularity
function do_nemo_setup()
{
alias qjob_nemo_user='echo "Number of jobs run by NEMO users"; qstat -u \* -ext -s r| tail -n+3 | grep nemo | awk "{print \$5}" | sort | uniq -c'
alias qjob_summary='qjob_my_total; qjob_my_run; qjob_nemo_user'
# Unset to start from fresh config.
pkgtools::unset_variable LD_LIBRARY_PATH
pkgtools::unset_variable PATH
pkgtools::unset_variable CFLAGS
# bin directories
pkgtools::add_path_to_PATH /usr/bin
pkgtools::add_path_to_PATH /bin
# # Use up-to-date gcc
# local gcc_version=5.2.0
# local gcc_dir=/usr/local/gcc/${gcc_version}
# pkgtools::add_path_to_PATH ${gcc_dir}/bin
# pkgtools::add_path_to_LD_LIBRARY_PATH ${gcc_dir}/lib64
# pkgtools::reset_variable LIBRARY_PATH ${gcc_dir}/lib64
# Set brew cache directory (default ~/.cache)
pkgtools::reset_variable HOMEBREW_CACHE ${SCRATCH_DIR}/workdir/supernemo/software/brew/.cache
# pkgtools::reset_variable HOMEBREW_TEMP ${SCRATCH_DIR}/workdir/supernemo/snware/brew/.tmp
pkgtools::reset_variable HOMEBREW_MAKE_JOBS 4
# pkgtools::reset_variable HOMEBREW_CC gcc-${gcc_version:0:1}
# pkgtools::reset_variable HOMEBREW_CXX g++-${gcc_version:0:1}
}
function do_cmb_setup()
{
alias qjob_planck_user='echo "Number of jobs run by Planck users"; qstat -u \* -ext -s r| tail -n+3 | grep planck | awk "{print \$5}" | sort | uniq -c'
alias qjob_summary='qjob_my_total; qjob_my_run; qjob_planck_user'
# Unset to start from fresh config.
pkgtools::unset_variable LD_LIBRARY_PATH
pkgtools::unset_variable PATH
# bin directories
pkgtools::add_path_to_PATH /usr/bin
pkgtools::add_path_to_PATH /bin
pkgtools::add_path_to_PATH $HOME/.local/bin
# Unset Planck PYTHONHOME/PYTHONPATH
pkgtools::unset_variable PYTHONHOME
pkgtools::unset_variable PYTHONPATH
pkgtools::unset_variable CFLAGS
pkgtools::reset_variable LDFLAGS "--no-warn-search-mismatch"
# # Use up-to-date gcc
# local gcc_version=5.2.0
# local gcc_dir=/usr/local/gcc/${gcc_version}
# pkgtools::add_path_to_PATH ${gcc_dir}/bin
# pkgtools::add_path_to_LD_LIBRARY_PATH ${gcc_dir}/lib64
# # Use python 3.6.5
# local python_version=3.6.5
# local python_dir=/usr/local/python/${python_version}
# pkgtools::add_path_to_PATH ${python_dir}/bin
# pkgtools::add_path_to_LD_LIBRARY_PATH ${python_dir}/lib
# # MKL
# local intel_compiler_library=/usr/local/intel/2018/compilers_and_libraries/linux
# pkgtools::reset_variable IOMP5LIB ${intel_compiler_library}/lib/intel64
# pkgtools::add_path_to_LD_LIBRARY_PATH ${IOMP5LIB}
# pkgtools::reset_variable MKLROOT ${intel_compiler_library}/mkl
# pkgtools::reset_variable MKLLIB ${MKLROOT}/lib/intel64
# pkgtools::add_path_to_LD_LIBRARY_PATH $MKLLIB
# pkgtools::add_path_to_PATH ${intel_compiler_library}/mpi/bin64
# pkgtools::add_path_to_LD_LIBRARY_PATH ${intel_compiler_library}/mpi/lib64
function run_cmb_mcmc()
{
pkgtools::default_values
pkgtools::at_function_enter run_cmb_mcmc
local mcmc=$CAMELROOT/$CMTCONFIG/mcmc
local parfile
local nsamples=500000
local nchain=4
local ncore=8
local queue=mc_long
local project=P_planck
local with_singularity=false
local singularity_img="cmb_centos7.simg"
while [ -n "$1" ]; do
local token="$1"
if [ ${token[0,1]} = - ]; then
local opt=${token}
if [[ ${opt} = -h || ${opt} = --help ]]; then
echo "Usage:\n run_cmb_mcmc [options] parfile"
echo "Options:"
echo " -h [--help] print this help message"
echo " -d [--debug] debug mode"
echo " -D [--devel] devel mode"
echo " --nsamples set number of MCMC samples"
echo " --nchain set number of qjob chains"
echo " --ncore set number of qjob cores"
return 0
elif [[ ${opt} = -d || ${opt} = --debug ]]; then
pkgtools::msg_using_debug
elif [[ ${opt} = -D || ${opt} = --devel ]]; then
pkgtools::msg_using_devel
elif [[ ${opt} = --nsamples ]]; then
shift 1
nsamples="$1"
elif [[ ${opt} = --nchain ]]; then
shift 1
nchain="$1"
elif [[ ${opt} = --ncore ]]; then
shift 1
ncore="$1"
elif [[ ${opt} = --with-singularity ]]; then
with_singularity=true
elif [[ ${opt} = --singularity-image ]]; then
shift 1
singularity_img="$1"
fi
else
parfile="${token}"
fi
shift 1
done
if [[ ${PKGMAN_SETUP_DONE} != cmb && ${with_singularity} = false ]]; then
pkgtools::msg_error "CMB configuration not setup!"
pkgtools::at_function_exit
return 1
fi
if [ -z ${parfile} ]; then
pkgtools::msg_error "Missing parameter file!"
pkgtools::at_function_exit
return 1
elif [ ! -f ${parfile} ]; then
pkgtools::msg_error "Parameter file '${parfile}' does not exist!"
pkgtools::at_function_exit
return 1
fi
# Make sure PWD is added
local parfile_dir=$(dirname ${parfile})
if [[ ${parfile_dir} = . ]]; then
parfile_dir=$PWD
parfile=$PWD/${parfile}
fi
local parfile_base=$(basename ${parfile})
local parfile_name=${parfile_base%.*}
# Get total number of MCMC parameters
local ndim=$(awk '$1=="par"{n++} END{print n}' ${parfile})
pkgtools::msg_notice "Number of parameters : ${ndim}"
# Get covariance file
local covfile=${parfile/.par/.cov}
if [ ! -f ${covfile} ]; then
pkgtools::msg_error "Missing associated covariance file!"
pkgtools::at_function_exit
return 1
fi
# Create output directory
local mcmc_dir=${parfile_dir}/${parfile_name}_MC
if [ -d ${mcmc_dir} ]; then
pkgtools::msg_warning "Directory '${mcmc_dir}' already exist!"
pkgtools::yesno_question "Do you want to remove it ?"
if $(pkgtools::answer_is_no); then
pkgtools::at_function_exit
return 0
fi
rm -rf ${mcmc_dir}
fi
mkdir -p ${mcmc_dir}
pkgtools::enter_directory ${mcmc_dir}
# Setup MCMC
cp ${covfile} .
# Remove precision from parfile
grep -v precision ${parfile} > ${parfile_base}
{
echo "dim=$ndim"
echo "algo=ada"
echo "length=$nsamples"
if ${with_singularity}; then
echo "proposal_cov=/mnt/${parfile_name}_MC/$(basename ${covfile})"
else
echo "proposal_cov=$PWD/$(basename ${covfile})"
fi
echo "ts=10000"
echo "t0=2000"
echo "scale=0.001"
echo "do_move=false"
} >> ${parfile_base}
# Create job script
local sysname=${SYSNAME/*_/}
if [[ ${sysname} = sl7 ]]; then
sysname="cl7"
fi
if ${with_singularity}; then
local singularity_software_path="/home/cmb/software"
{
echo "#!/bin/bash"
echo "source ${singularity_software_path}/cmb_setup.sh"
echo "cd /mnt/${parfile_name}_MC"
echo "cp ${parfile_base} mcmc\${SGE_TASK_ID}.par"
echo "echo \"seed=\$RANDOM\" >> mcmc\${SGE_TASK_ID}.par"
echo
echo "mcmc mcmc\${SGE_TASK_ID}.par samples\${SGE_TASK_ID}.txt > output\${SGE_TASK_ID}.log 2>&1"
echo
echo "cp ar_vs_length.txt ar_vs_length\${SGE_TASK_ID}.txt"
echo "cp scale_vs_length.txt scale_vs_length\${SGE_TASK_ID}.txt"
echo "cp corr.txt corr\${SGE_TASK_ID}.txt"
echo
} >> ${parfile_name}.sh && chmod u+x ${parfile_name}.sh
{
echo "#$ -l sps=1"
echo "#$ -l os=${sysname}"
echo "#$ -j y"
echo "#$ -R y"
echo "#$ -t 1-$nchain"
echo "#$ -N ${parfile_name}"
echo -n "SINGULARITYENV_SGE_TASK_ID=\${SGE_TASK_ID} singularity exec -B /sps/planck/camel/CentOS7/data:/home/cmb/data "
echo -n "-B $(dirname $PWD):/mnt --cleanenv $SINGULARITY_CACHEDIR/${singularity_img} /mnt/${parfile_name}_MC/${parfile_name}.sh"
echo
} >> ${parfile_name}_singularity.sh
else
{
echo "#$ -l sps=1"
echo "#$ -l os=${sysname}"
echo "#$ -j y"
echo "#$ -R y"
echo "#$ -t 1-$nchain"
echo "#$ -N ${parfile_name}"
# echo "#$ -m be"
echo
echo "echo \"running on : \$(uname -a)\""
echo "source $(dirname $(which python))/activate"
echo "export OMP_NUM_THREADS=$ncore"
echo "cd \$TMPDIR"
echo "cp ${mcmc} ."
echo "cp $PWD/${parfile_base} mcmc\${SGE_TASK_ID}.par"
echo "cp $PWD/${parfile_base/.par/.cov} ."
echo "echo \"seed=\$RANDOM\" >> mcmc\${SGE_TASK_ID}.par"
echo
echo "cp mcmc\${SGE_TASK_ID}.par $PWD"
echo
echo "./mcmc mcmc\${SGE_TASK_ID}.par $PWD/samples\${SGE_TASK_ID}.txt > $PWD/output\${SGE_TASK_ID}.log 2>&1"
echo
echo "cp ar_vs_length.txt $PWD/ar_vs_length\${SGE_TASK_ID}.txt"
echo "cp scale_vs_length.txt $PWD/scale_vs_length\${SGE_TASK_ID}.txt"
echo "cp corr.txt $PWD/corr\${SGE_TASK_ID}.txt"
echo
echo "qstat -j \${JOB_ID} -nenv"
} >> ${parfile_name}.sh
fi
pkgtools::msg_notice "Parameter file : ${parfile}"
pkgtools::msg_notice "Covariance file : ${covfile}"
pkgtools::msg_notice "MCMC directory : ${mcmc_dir}"
pkgtools::msg_notice "CC jobs setup :"
pkgtools::msg_notice " - Number of chains : ${nchain}"
pkgtools::msg_notice " - Number of cores : ${ncore}"
pkgtools::msg_notice " - Queue type : ${queue}"
pkgtools::msg_notice " - Project type : ${project}"
if ${with_singularity}; then
pkgtools::msg_notice " - Singularity image : $SINGULARITY_CACHEDIR/${singularity_img}"
fi
pkgtools::yesno_question "Start jobs ?"
if $(pkgtools::answer_is_yes); then
if ${with_singularity}; then
qsub -P ${project} -pe multicores ${ncore} -q ${queue} -o $PWD ${parfile_name}_singularity.sh
else
qsub -P ${project} -pe multicores ${ncore} -q ${queue} -o $PWD ${parfile_name}.sh
fi
fi
pkgtools::exit_directory
pkgtools::at_function_exit
return 0
}
compdef _run_cmb_mcmc run_cmb_mcmc
function _run_cmb_mcmc ()
{
_arguments \
{-h,--help}'[print this help message]' \
{-d,--debug}'[debug mode]' \
{-D,--devel}'[devel mode]' \
'--nsamples[set number of MCMC samples]' \
'--nchain[set number of qjob chains]' \
'--ncore[set number of qjob cores]' \
'--with-singularity[use singularity]' \
'--singularity-image[set singularity image name]' \
'*:file:_files -g "*.par"' \
&& ret=0
return ret
}
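# A hedged example of launching MCMC chains (the parameter file name is
# hypothetical; a matching .cov covariance file must sit next to it, as
# checked above):
#   run_cmb_mcmc --nsamples 100000 --nchain 4 --ncore 8 lcdm_TT.par
#   # or, inside the Singularity image:
#   run_cmb_mcmc --with-singularity --singularity-image cmb_centos7.simg lcdm_TT.par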
function run_cmb_profile()
{
pkgtools::default_values
pkgtools::at_function_enter run_cmb_profile
if [[ ${PKGMAN_SETUP_DONE} != cmb ]]; then
pkgtools::msg_error "CMB configuration not setup!"
pkgtools::at_function_exit
return 1
fi
local dry_run=false
local profile_exe=$CAMELROOT/$CMTCONFIG/Profile
local parfile
local parameter_name
local -a parameter_range
local nchain=10
local ncore=8
local queue=mc_long
local project=P_planck
while [ -n "$1" ]; do
local token="$1"
if [ ${token[0,1]} = - ]; then
local opt=${token}
if [[ ${opt} = -h || ${opt} = --help ]]; then
echo "Usage:\n run_cmb_profile [options] parfile"
echo "Options:"
echo " -h [--help] print this help message"
echo " -d [--debug] debug mode"
echo " -D [--devel] devel mode"
echo " --dry-run do not start qsub jobs"
echo " --parameter-name set the name of the parameter to profile"
echo " --parameter-range set the parameter's range to profile"
echo " --nchain set number of qjob chains"
echo " --ncore set number of qjob cores"
return 0
elif [[ ${opt} = -d || ${opt} = --debug ]]; then
pkgtools::msg_using_debug
elif [[ ${opt} = -D || ${opt} = --devel ]]; then
pkgtools::msg_using_devel
elif [[ ${opt} = --dry-run ]]; then
dry_run=true
elif [[ ${opt} = --parameter-name ]]; then
shift 1
parameter_name="$1"
elif [[ ${opt} = --parameter-range ]]; then
shift 1
local pars=$1
parameter_range=( ${=pars} )
elif [[ ${opt} = --nchain ]]; then
shift 1
nchain="$1"
elif [[ ${opt} = --ncore ]]; then
shift 1
ncore="$1"
fi
else
parfile="${token}"
fi
shift 1
done
if [ -z "${parfile}" ]; then
pkgtools::msg_error "Missing parameter file!"
pkgtools::at_function_exit
return 1
elif [ ! -f "${parfile}" ]; then
pkgtools::msg_error "Parameter file '${parfile}' does not exist!"
pkgtools::at_function_exit
return 1
fi
# Make sure PWD is added
local parfile_dir=$(dirname ${parfile})
if [[ ${parfile_dir} = . ]]; then
parfile_dir=$PWD
parfile=$PWD/${parfile}
fi
local parfile_base=$(basename ${parfile})
local parfile_name=${parfile_base%.*}
if [ -z "${parameter_name}" ]; then
pkgtools::msg_error "Missing parameter name!"
pkgtools::at_function_exit
return 1
fi
if [ -z "${parameter_range}" ]; then
pkgtools::msg_error "Missing parameter range!"
pkgtools::at_function_exit
return 1
fi
# Create output directory
local profile_dir=${parfile_dir}/${parfile_name}_${parameter_name}_prof
if [ -d ${profile_dir} ]; then
pkgtools::msg_warning "Directory '${profile_dir}' already exist!"
pkgtools::yesno_question "Do you want to remove it ?"
if $(pkgtools::answer_is_no); then
pkgtools::at_function_exit
return 0
fi
rm -rf ${profile_dir}
fi
mkdir -p ${profile_dir}
local sysname=${SYSNAME/*_/}
if [[ ${sysname} = sl7 ]]; then
sysname="cl7"
fi
pkgtools::msg_notice "Parameter file : ${parfile}"
pkgtools::msg_notice "Parameter name : ${parameter_name}"
pkgtools::msg_notice "Parameter range : ${=parameter_range}"
pkgtools::msg_notice "Profile directory : ${profile_dir}"
pkgtools::msg_notice "CC jobs setup :"
pkgtools::msg_notice " - Number of chains : ${nchain}"
pkgtools::msg_notice " - Number of cores : ${ncore}"
pkgtools::msg_notice " - Queue type : ${queue}"
pkgtools::msg_notice " - Project type : ${project}"
pkgtools::msg_notice " - Machine type : ${sysname}"
pkgtools::yesno_question "Start jobs ?"
if $(pkgtools::answer_is_no); then
pkgtools::at_function_exit
return 0
fi
# Looping over parameter range
for par in ${parameter_range}; do
pkgtools::msg_notice "Starting job for ${parameter_name}=${par}..."
local run_name="${parameter_name}_${par}"
local par_dir="${profile_dir}/${run_name}"
if [ -d ${par_dir} ]; then
rm -rf ${par_dir}
else
mkdir -p ${par_dir}
fi
pkgtools::enter_directory ${par_dir}
# Create job script
{
echo "#$ -l sps=1"
echo "#$ -l os=${sysname}"
echo "#$ -j y"
echo "#$ -R y"
echo "#$ -t 1-$nchain"
echo "#$ -N ${parfile_name}_${run_name}"
# echo "#$ -m be"
echo
echo "echo \"running on : \$(uname -a)\""
echo "source $(dirname $(which python))/activate"
echo "export OMP_NUM_THREADS=$ncore"
echo "cd \$TMPDIR"
echo "cp ${parfile} profile.par"
echo "cp ${profile_exe} ."
echo "cp ${CAMELROOT}/work/tools/awk/genrand.awk ."
echo
echo "current_profile=profile\${SGE_TASK_ID}.par"
echo "if [ \${SGE_TASK_ID} -eq 1 ]; then"
echo "cp profile.par \${current_profile}"
echo "else"
echo "awk -v seed=\$RANDOM -f genrand.awk profile.par > \${current_profile}"
echo "fi"
echo
echo "cp \${current_profile} $PWD"
echo
echo "./Profile \${current_profile} ${parameter_name} ${par} ${par} 1 best_fit > $PWD/output\${SGE_TASK_ID}.log 2>&1"
echo
echo "cp best_fit $PWD/best_fit\${SGE_TASK_ID}.txt"
echo
echo "qstat -j \${JOB_ID} -nenv"
} >> profile.sh
# Running job
if ! ${dry_run}; then
qsub -P ${project} -pe multicores ${ncore} -q ${queue} -o $PWD profile.sh
fi
pkgtools::exit_directory
done
pkgtools::at_function_exit
return 0
}
compdef _run_cmb_profile run_cmb_profile
function _run_cmb_profile ()
{
_arguments \
{-h,--help}'[print this help message]' \
{-d,--debug}'[debug mode]' \
{-D,--devel}'[devel mode]' \
'--dry-run[do not start qsub jobs]' \
'--nchain[set number of qjob chains]' \
'--ncore[set number of qjob cores]' \
'--parameter-name[set the name of the parameter to profile]' \
'--parameter-range[set the parameter range to profile]' \
'*:file:_files -g \*.par' \
&& ret=0
return ret
}
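# A hedged example of profiling a single parameter (the parameter name and
# range values are hypothetical; the range is given as one space-separated
# string, as parsed above):
#   run_cmb_profile --parameter-name H0 --parameter-range "66 67 68 69 70" \
#     --nchain 10 lcdm_TT.par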
function run_cmb_minimize()
{
pkgtools::default_values
pkgtools::at_function_enter run_cmb_minimize
if [[ ${PKGMAN_SETUP_DONE} != cmb ]]; then
pkgtools::msg_error "CMB configuration not setup!"
pkgtools::at_function_exit
return 1
fi
local dry_run=false
local run_local=false
local minimize_exe=$CAMELROOT/$CMTCONFIG/Minimize
local parfile
local nchain=5
local ncore=8
local queue=mc_long
local project=P_planck
local yes=false
while [ -n "$1" ]; do
local token="$1"
if [ ${token[0,1]} = - ]; then
local opt=${token}
if [[ ${opt} = -h || ${opt} = --help ]]; then
echo "Usage:\n run_cmb_minimize [options] parfile"
echo "Options:"
echo " -h [--help] print this help message"
echo " -d [--debug] debug mode"
echo " -D [--devel] devel mode"
echo " --dry-run do not start qsub jobs"
echo " --nchain set number of qjob chains"
echo " --ncore set number of qjob cores"
echo " --local do not send jobs to CC farms"
echo " --yes do not ask question"
return 0
elif [[ ${opt} = -d || ${opt} = --debug ]]; then
pkgtools::msg_using_debug
elif [[ ${opt} = -D || ${opt} = --devel ]]; then
pkgtools::msg_using_devel
elif [[ ${opt} = --dry-run ]]; then
dry_run=true
elif [[ ${opt} = --nchain ]]; then
shift 1
nchain="$1"
elif [[ ${opt} = --ncore ]]; then
shift 1
ncore="$1"
elif [[ ${opt} = --local ]]; then
run_local=true
elif [[ ${opt} = --yes ]]; then
yes=true
fi
else
parfile="${token}"
fi
shift 1
done
if [ -z "${parfile}" ]; then
pkgtools::msg_error "Missing parameter file!"
pkgtools::at_function_exit
return 1
elif [ ! -f "${parfile}" ]; then
pkgtools::msg_error "Parameter file '${parfile}' does not exist!"
pkgtools::at_function_exit
return 1
fi
# Make sure PWD is added
local parfile_dir=$(dirname ${parfile})
if [[ ${parfile_dir} = . ]]; then
parfile_dir=$PWD
parfile=$PWD/${parfile}
fi
local parfile_base=$(basename ${parfile})
local parfile_name=${parfile_base%.*}
# Create output directory
local minimize_dir=${parfile_dir}/${parfile_name}_min
if [ -d ${minimize_dir} ]; then
pkgtools::msg_warning "Directory '${minimize_dir}' already exist!"
pkgtools::yesno_question "Do you want to remove it ?"
if $(pkgtools::answer_is_no); then
pkgtools::at_function_exit
return 0
fi
rm -rf ${minimize_dir}
fi
mkdir -p ${minimize_dir}
pkgtools::msg_notice "Parameter file : ${parfile}"
pkgtools::msg_notice "Minimize directory : ${minimize_dir}"
pkgtools::msg_notice "CC jobs setup :"
pkgtools::msg_notice " - Number of chains : ${nchain}"
pkgtools::msg_notice " - Number of cores : ${ncore}"
pkgtools::msg_notice " - Queue type : ${queue}"
pkgtools::msg_notice " - Project type : ${project}"
if ! ${yes}; then
pkgtools::yesno_question "Start jobs ?"
if $(pkgtools::answer_is_no); then
pkgtools::at_function_exit
return 0
fi
fi
pkgtools::enter_directory ${minimize_dir}
# Create job script
{
local sysname=${SYSNAME/*_/}
if [[ ${sysname} = sl7 ]]; then
sysname="cl7"
fi
echo "#$ -l sps=1"
echo "#$ -l os=${sysname}"
echo "#$ -j y"
echo "#$ -R y"
echo "#$ -t 1-$nchain"
echo "#$ -N ${parfile_name}"
# echo "#$ -m be"
echo
echo "echo \"running on : \$(uname -a)\""
echo "source $(dirname $(which python))/activate"
echo "export OMP_NUM_THREADS=$ncore"
echo "cd \$TMPDIR"
echo "cp ${parfile} minimize.par"
echo "cp ${minimize_exe} ."
echo "cp ${CAMELROOT}/work/tools/awk/genrand.awk ."
echo
echo "current_minimize=minimize\${SGE_TASK_ID}.par"
echo "if [ \${SGE_TASK_ID} -eq 1 ]; then"
echo "cp minimize.par \${current_minimize}"
echo "else"
echo "awk -v seed=\$RANDOM -f genrand.awk minimize.par > \${current_minimize}"
echo "fi"
echo
echo "cp \${current_minimize} $PWD"
echo
echo "./Minimize \${current_minimize} best_fit covmat > $PWD/output\${SGE_TASK_ID}.log 2>&1"
echo
echo "cp best_fit $PWD/best_fit\${SGE_TASK_ID}.txt"
echo "cp covmat $PWD/covmat\${SGE_TASK_ID}.txt"
echo
} >> minimize.sh
# Running job
if ! ${dry_run}; then
if ! ${run_local}; then
qsub -P ${project} -pe multicores ${ncore} -q ${queue} -o $PWD minimize.sh
else
chmod u+x minimize.sh
for ((i=1; i<=${nchain};i++)); do
local dir=$PWD
TMPDIR=$(mktemp -d)
(
cd $TMPDIR
SGE_TASK_ID=$i ${dir}/minimize.sh
)
rm -fr $TMPDIR
sleep 0
done
fi
fi
pkgtools::exit_directory
pkgtools::at_function_exit
return 0
}
compdef _run_cmb_minimize run_cmb_minimize
function _run_cmb_minimize ()
{
_arguments \
{-h,--help}'[print this help message]' \
{-d,--debug}'[debug mode]' \
{-D,--devel}'[devel mode]' \
'--dry-run[do not start qsub jobs]' \
'--nchain[set number of qjob chains]' \
'--ncore[set number of qjob cores]' \
'--local[do it locally]' \
'--yes[force execution with no question]' \
'*:file:_files -g \*.par' \
&& ret=0
return ret
}
}
fi
if (( $+commands[squeue] )); then
alias _squeue='squeue -u $(whoami)'
alias sjob_my_total='echo -ne "Total number of jobs: ";_squeue | tail -n+2 | wc -l'
alias sjob_my_run='echo -ne "Number of running jobs: ";_squeue -t R | tail -n+2 | wc -l'
alias scancel_all="scancel \$(_squeue | awk '{print \$1}' | awk -F_ '{print \$1}' | grep -v JOBID | sort -u | tr '\n' ' ')"
alias slogin="srun -t 0-01:00:00 -n 4 --mem 2G --pty zsh -i"
fi
Implementation of J. Peloton's data sheet: https://gist.github.com/JulienPeloton/bb77476623a090c60ee1b7c2a2791699
function start_openstack()
{
if [ ! -d /tmp/vdata ]; then
pkgtools::msg_notice "Creating virtual env. for openstack dev."
python -m venv /tmp/vdata && source /tmp/vdata/bin/activate
pip install -U pip python-openstackclient python-glanceclient
fi
pkgtools::msg_notice "Setting permissions"
source ~/.certs/os_lal.rc
}
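# A hedged usage note: once the virtual environment and the credentials are
# sourced, the standard OpenStack CLI is available, e.g.
#   start_openstack
#   openstack image list     # list available images
#   openstack server list    # list running instances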
function activate_cxxflags ()
{
pkgtools::at_function_enter activate_cxxflags
export CXXFLAGS="-Waddress -Warray-bounds -Wc++11-compat -Wchar-subscripts \
-Wenum-compare -Wcomment -Wformat -Wmain -Wmaybe-uninitialized -Wmissing-braces \
-Wnonnull -Wparentheses -Wreorder -Wreturn-type -Wsequence-point -Wsign-compare \
-Wstrict-aliasing -Wstrict-overflow=1 -Wswitch -Wtrigraphs -Wuninitialized \
-Wunknown-pragmas -Wunused-function -Wunused-label -Wunused-value \
-Wunused-variable -Wvolatile-register-var -Wclobbered -Wempty-body \
-Wignored-qualifiers -Wmissing-field-initializers -Wsign-compare -Wtype-limits \
-Wuninitialized -Wunused-parameter -Wunused-but-set-parameter"
pkgtools::at_function_exit
return 0
}
function make_org_doc ()
{
pkgtools::default_values
pkgtools::at_function_enter make_org_doc
# Internal functions
--mod::usage () {
echo "Usage:\n make_org_doc [options] <directory-name>"
echo "(see the completion function _make_org_doc below for the list of options)"
}
local append_list_of_options
local append_list_of_arguments
local type
local title
local author="Xavier Garrido"
local email="[email protected]"
local latex_class
local latex_class_options
local base_directory
while [ -n "$1" ]; do
local token=$1
if [ "${token[0,1]}" = "-" ]; then
local opt=${token}
append_list_of_options+="${opt} "
if [ "${opt}" = "-h" -o "${opt}" = "--help" ]; then
--mod::usage
return 0
elif [ "${opt}" = "-d" -o "${opt}" = "--debug" ]; then
pkgtools::msg_using_debug
elif [ "${opt}" = "-D" -o "${opt}" = "--devel" ]; then
pkgtools::msg_using_devel
elif [ "${opt}" = "-v" -o "${opt}" = "--verbose" ]; then
pkgtools::msg_using_verbose
elif [ "${opt}" = "-W" -o "${opt}" = "--no-warning" ]; then
pkgtools::msg_not_using_warning
elif [ "${opt}" = "-q" -o "${opt}" = "--quiet" ]; then
pkgtools::msg_using_quiet
export PKGTOOLS_MSG_QUIET=1
elif [ "${opt}" = "-i" -o "${opt}" = "--interactive" ]; then
pkgtools::ui_interactive
elif [ "${opt}" = "-b" -o "${opt}" = "--batch" ]; then
pkgtools::ui_batch
elif [ "${opt}" = "--gui" ]; then
pkgtools::ui_using_gui
elif [ "${opt}" = "--doc-type" ]; then
shift 1; type="$1"
elif [ "${opt}" = "--title" ]; then
shift 1; title="$1"
elif [ "${opt}" = "--author" ]; then
shift 1; author="$1"
elif [ "${opt}" = "--email" ]; then
shift 1; email="$1"
elif [ "${opt}" = "--latex-class" ]; then
shift 1; latex_class="$1"
elif [ "${opt}" = "--latex-class-options" ]; then
shift 1; latex_class_options="$1"
elif [ "${opt}" = "--base-directory" ]; then
shift 1; base_directory="$1"
fi
else
arg=${token}
if [ "x${arg}" != "x" ]; then
append_list_of_arguments+="${arg} "
fi
fi
shift 1
done
local dirname="$(echo ${append_list_of_arguments} | awk '{print $1}')"
if [ "${dirname}" = "" ]; then
pkgtools::msg_error "You must give a directory name!"
pkgtools::at_function_exit
return 1
fi
if [ ! -n "$type" ]; then
pkgtools::msg_error "Missing document type!"
pkgtools::at_function_exit
return 1
fi
case $type in
(talk)
test ! -n "$base_directory" && base_directory=~/Workdir/Talk
test ! -n "$latex_class" && latex_class="beamer"
test ! -n "$latex_class_options" && latex_class_options="snemo,nologo"
;;
(note)
;;
(article)
test ! -n "$base_directory" && base_directory=~/Workdir/Papers/pub/nemo-note
test ! -n "$latex_class" && latex_class="snemo-article"
test ! -n "$latex_class_options" && latex_class_options=""
;;
esac
pkgtools::msg_devel "base_directory=${base_directory}"
pkgtools::msg_devel "latex_class=${latex_class}"
pkgtools::msg_devel "latex_class_options=${latex_class_options}"
# Remove last space
append_list_of_arguments=${append_list_of_arguments%?}
append_list_of_options=${append_list_of_options%?}
pkgtools::msg_devel "append_list_of_arguments=${append_list_of_arguments}"
pkgtools::msg_devel "append_list_of_options=${append_list_of_options}"
local directory=${base_directory}/${dirname}
--mod::create_directories () {
mkdir -p ${directory}/{pdf,figures}
}
--mod::generate_org_skeleton () {
local org_file
case $type in
(talk)
org_file=${directory}/talk.org
;;
(article)
org_file=${directory}/note-$(date +%y%m%d).org
;;
esac
echo "#+TITLE: ${title}" > ${org_file}
echo "#+AUTHOR: ${author}" >> ${org_file}
echo "#+EMAIL: ${email}" >> ${org_file}
echo "#+DATE: $(date +%d/%m/%Y)" >> ${org_file}
case $type in
(talk)
echo "#+OPTIONS: toc:nil num:nil author:nil email:t ^:{}" >> ${org_file}
echo "#+STARTUP: beamer" >> ${org_file}
;;
(article)
echo "#+OPTIONS: toc:nil date:nil author:nil email:t ^:{}" >> ${org_file}
echo "#+STARTUP: entitiespretty" >> ${org_file}
;;
esac
echo "#+LATEX_CLASS: ${latex_class}" >> ${org_file}
echo "#+LATEX_CLASS_OPTIONS: [${latex_class_options}]" >> ${org_file}
echo "" >> ${org_file}
# Special setup for article
case $type in
(article)
begin="#+BEGIN"
end="#+END"
echo "* Abstract :ignoreheading:" >> ${org_file}
echo "${begin}_ABSTRACT" >> ${org_file}
echo "${end}_ABSTRACT" >> ${org_file}
echo "* Introduction :ignoresecnumber:" >> ${org_file}
echo "* Conclusion :ignoresecnumber:" >> ${org_file}
echo "* References :ignoresecnumber:" >> ${org_file}
echo "${begin}_BIBLIOGRAPHY" >> ${org_file}
echo "${end}_BIBLIOGRAPHY" >> ${org_file}
;;
esac
}
--mod::generate_makefile () {
local make_file=${directory}/Makefile
case $type in
(talk)
echo "# -*- mode: makefile; -*-" > ${make_file}
echo "EMACS=emacs" >> ${make_file}
echo "BATCH=\$(EMACS) --batch --eval '(setq starter-kit-dir \"~/.emacs.d\")' \
--load '~/.emacs.d/starter-kit-org.el'" >> ${make_file}
echo "files_org = \$(wildcard *.org)" >> ${make_file}
echo "files_pdf = \$(files_org:.org=.pdf)" >> ${make_file}
echo >> ${make_file}
echo "all: \$(files_pdf)" >> ${make_file}
echo >> ${make_file}
echo "%.pdf: %.org" >> ${make_file}
echo "\t@echo \"NOTICE: Exporting \$< to pdf...\";" >> ${make_file}
echo "\t@\$(BATCH) --visit \"\$<\" --funcall org-beamer-export-to-pdf" >> ${make_file}
echo "\t@cp \$@ pdf/\${@:.pdf=_${dirname}.pdf}" >> ${make_file}
echo >> ${make_file}
echo "tar : clean" >> ${make_file}
echo "\t@mkdir -p tar" >> ${make_file}
echo "\t@cd tar && tar --exclude=\"../.git*\" --exclude=\"../tar\" -czvf talk_${dirname}.tar.gz ." >> ${make_file}
echo >> ${make_file}
echo "clean:" >> ${make_file}
echo "\t@rm -rf latex.d *.tex *.pdf *.fdb* *~ *.el tar" >> ${make_file}
echo "\t@rm -rf *.out *.fls *.toc *.aux *.snm *.nav *.log" >> ${make_file}
;;
(article)
echo "# -*- mode: makefile; -*-" > ${make_file}
echo "EMACS=emacs" >> ${make_file}
echo "BATCH=\$(EMACS) --batch --eval '(setq starter-kit-dir \"~/.emacs.d\")' \
--load '~/.emacs.d/starter-kit-org.el'" >> ${make_file}
echo "files_org = \$(wildcard note*.org)" >> ${make_file}
echo "files_pdf = doc/pdf/\$(files_org:.org=.pdf)" >> ${make_file}
echo "files_html = doc/html/\$(files_org:.org=.html)" >> ${make_file}
echo >> ${make_file}
echo "all: pdf" >> ${make_file}
echo >> ${make_file}
echo "pdf: \$(files_pdf)" >> ${make_file}
echo "doc/pdf/%.pdf: %.org" >> ${make_file}
echo "\t@echo \"NOTICE: Exporting $< to pdf...\"" >> ${make_file}
echo "\t@zsh -i -c \"org-pages --pdf --debug generate\"" >> ${make_file}
echo >> ${make_file}
echo "html: \$(files_html)" >> ${make_file}
echo "doc/html/%.html: %.org" >> ${make_file}
echo "\t@echo \"NOTICE: Exporting $< to html...\"" >> ${make_file}
echo "\t@zsh -i -c \"org-pages --html --debug generate\"" >> ${make_file}
echo >> ${make_file}
echo "clean:" >> ${make_file}
echo "\t@rm -rf latex.d doc *.tex *.pdf *.toc *.fdb* *~ README.el" >> ${make_file}
echo >> ${make_file}
echo ".PHONY: all pdf html clean" >> ${make_file}
;;
esac
}
--mod::generate_gitignore () {
local gitignore=${directory}/.gitignore
echo "/*.pdf" >> ${gitignore}
echo "/*.tex" >> ${gitignore}
echo "/*.auxlock" >> ${gitignore}
echo "/*.vrb" >> ${gitignore}
echo "/*.fdb_latexmk" >> ${gitignore}
echo "/*.fls" >> ${gitignore}
echo "/*.aux" >> ${gitignore}
echo "*~" >> ${gitignore}
echo "latex.d/" >> ${gitignore}
}
--mod::import_doc () {
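# Import the freshly created document directory into SVN, then replace the
# local copy with a git-svn checkout that tracks the new SVN location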
(
local svn_directory
case $type in
(talk)
svn_directory=https://svn.lal.in2p3.fr/users/garrido/Talk
;;
(article)
svn_directory=https://svn.lal.in2p3.fr/users/garrido/Publications/nemo-note
;;
esac
if [ -d ${directory}/.git ]; then
pkgtools::msg_warning "Directory '${directory}' is already under git-svn !"
return 0
fi
svn mkdir ${svn_directory}/${dirname} -m "create ${dirname} directory"
svn import ${directory} ${svn_directory}/${dirname} -m "import trunk directory"
rm -rf ${base_directory}/${dirname}
mkdir -p ${base_directory}/${dirname}
cd ${base_directory}/${dirname}
git svn init --prefix=svn/ --trunk=. ${svn_directory}/${dirname}
git svn fetch
)
}
--mod::create_directories
--mod::generate_org_skeleton
--mod::generate_makefile
--mod::generate_gitignore
--mod::import_doc
# Finally go to the newly created document directory
cd ${base_directory}/${dirname}
unset title author email latex_class latex_class_options
unset dirname directory base_directory svn_directory org_file
unset append_list_of_arguments append_list_of_options
unfunction -- --mod::usage
unfunction -- --mod::import_doc
unfunction -- --mod::generate_gitignore
unfunction -- --mod::generate_makefile
unfunction -- --mod::generate_org_skeleton
unfunction -- --mod::create_directories
pkgtools::at_function_exit
return 0
}
# Connect completion system
compdef _make_org_doc make_org_doc
_make_org_doc () {
_arguments -C \
'(-h --help)'{-h,--help}'[print help message]' \
'(-v --verbose)'{-v,--verbose}'[produce verbose logging]' \
'(-d --debug)'{-d,--debug}'[produce debug logging]' \
'(-D --devel)'{-D,--devel}'[produce devel logging]' \
--doc-type'[set document type]:type:->type' \
--title'[set talk title]' \
--author'[set author name]' \
--email'[set email]' \
--latex-class'[set LaTeX class name]:class:->class' \
--latex-class-options'[set LaTeX class options]:class-options:->class-options' \
--base-directory'[directory to put document]' \
'*: :->args' && ret=0
case $state in
(type)
local types; types=('talk' 'article' 'note')
_describe -t 'types' 'type' types && ret=0
;;
(class)
local classes; classes=('beamer')
_describe -t 'classes' 'class' classes && ret=0
;;
(class-options)
local class_options; class_options=(
'snemo' 'cpp_teaching' 'ddpfo'
'nologo' 'notitlelogo' 'noheaderlogo'
)
_describe -t 'class-options' 'option' class_options && ret=0
;;
(args)
local dirname; dirname=($(date +%y%m%d)_)
_describe -t 'dirname' 'dirname' dirname && ret=0
;;
esac
}
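A minimal usage sketch for make_org_doc; the title, author and directory name below are illustrative, and the space-separated option/value style is assumed from the option parser defined above:
# Create a beamer talk skeleton in ~/Workdir/Talk and import it under git-svn
# (all values are placeholders)
make_org_doc --doc-type talk --title "SuperNEMO status" --author "John Doe" $(date +%y%m%d)_collaboration_meeting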
This function parses the content of a C++ program and extracts the command-line options declared with boost::program_options in order to generate a zsh completion file (a usage sketch follows the function).
function parse_cpp_program_options ()
{
pkgtools::at_function_enter parse_cpp_program_options
# Internal helper converting each boost::program_options option declaration into a zsh '_arguments' spec line
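# For instance, a declaration like ("help,h", "produce help message") is
# expected to be rendered as the spec: {-h,--help}'[produce help message]' \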
__parse ()
{
local find_begin_description=0
local find_end_description=1
local data_type=""
for token in $(sed -n '/add_options/,/;/p' $1)
do
if [[ "$token" == *';'* ]]; then
break
fi
token=${token/\\n/ }
pkgtools::msg_devel "token = ${token}"
if [[ "$token" == *'"'* ]]; then
# Get the option identifier
if [[ "$token" == *'("'* ]]; then
if [[ "$token" == *'")'* ]]; then
continue
fi
if [ ${find_end_description} -eq 0 ]; then
data_type=""
find_end_description=1
find_begin_description=0
echo "]' \\"
fi
local tmp=$(echo ${token%?} | sed 's/[("\]//g')
local opt1=$(echo $tmp | cut -d',' -f1)
local opt2=$(echo $tmp | cut -d',' -f2)
if [ ${#opt1} = ${#opt2} ]; then
test ${#opt1} -gt 1 && echo -ne "--${opt1}"
elif [ ${#opt1} -gt ${#opt2} ]; then
echo -ne "{-${opt2},--${opt1}}"
else
echo -ne "{-${opt1},--${opt2}}"
fi
elif [[ "$token" == *'")'* ]]; then
token=$(echo ${token} | sed 's/[."]//g')
if [ ${find_begin_description} -eq 1 ]; then
data_type=""
find_end_description=1
find_begin_description=0
echo "${token%)}${data_type}]' \\"
fi
else
token=$(echo ${token} | sed 's/["\\]//g')
if [ ${find_end_description} -eq 1 ]; then
find_end_description=0
find_begin_description=1
echo -ne "'[${token#\"} "
else
echo -ne "${token} "
fi
fi
elif [[ "$token" != *'->'* ]]; then
if [[ ${find_begin_description} -eq 1 && ${find_end_description} -eq 0 ]]; then
if [ "$token" != ")" ]; then
token=$(echo ${token} | sed 's/[;"\\]//g')
echo -ne "${token} "
fi
# elif [[ "${token}" == *"::value<"* ]]; then
# tmp=${token##*value<}
# tmp=${tmp%%>*}
# if [ "${tmp}" == "bool" ];then
# data_type=":boolean:(true false)"
# elif [ "${tmp}" == "int" ]; then
# data_type=":number"
# elif [ "${tmp}" == "double" ]; then
# data_type=":number"
# fi
fi
fi
done
if [ ${find_end_description} -eq 0 ]; then
echo "]' \\"
fi
unset token
unset find_begin_description find_end_description
unset data_type
}
for program_file in $1
do
local program_name=$(basename ${program_file%.cxx})
local completion_file=/tmp/_${program_name}
if ! grep -q add_options ${program_file}; then
pkgtools::msg_warning "Program ${program_name} does not use boost::program_option ! Skip it !"
continue
else
pkgtools::msg_notice "Build completion system for program ${program_name}"
fi
__header () {
echo "#compdef ${program_name}"
echo
echo "function _${program_name} ()"
echo "{"
echo "typeset -A opt_args"
echo "local context state line curcontext=\"$curcontext\""
echo
echo " _arguments \\"
}
__header > ${completion_file}
__parse ${program_file} >> ${completion_file}
__footer () {
echo "'*: :->args' \\"
echo "&& ret=0"
echo
echo "case \$state in"
echo "args)"
echo "_files -/"
echo ";;"
echo "esac"
echo
echo "return ret"
echo "}"
echo
echo "_${program_name} \"\$@\""
echo
echo "# Local Variables:"
echo "# mode: Shell-Script"
echo "# sh-indentation: 2"
echo "# indent-tabs-mode: nil"
echo "# sh-basic-offset: 2"
echo "# End:"
}
__footer >> ${completion_file}
pkgtools::msg_notice "File has been parsed in ${completion_file} file"
done
pkgtools::at_function_exit
return 0
}
compdef '_files -g "*.cxx"' parse_cpp_program_options
# export PYENV_ROOT="$HOME/.pyenv"
# [[ -d $PYENV_ROOT/bin ]] && export PATH="$PYENV_ROOT/bin:$PATH"
# eval "$(pyenv init -)"
function nbconvert()
{
if ! (( $+commands[jupyter-nbconvert] )); then
pkgtools::msg_error "Missing jupyter-nbconvert executable"
return 1
fi
local -a notebooks
local template="the-lab"
local icon=""
# icon="f085"
local open=false
local create_gist=false
local privacy="--private"
local width="50%"
local no_git_footer=false
local opts=""
while [ -n "$1" ]; do
token="$1"
if [[ ${token:0:1} == "-" ]]; then
if [[ ${token} == "-h" || ${token} == "--help" ]]; then
return 0
elif [[ ${token} == "-d" || ${token} == "--debug" ]]; then
pkgtools::msg_using_debug
elif [[ ${token} == "-D" || ${token} == "--devel" ]]; then
pkgtools::msg_using_devel
elif [[ ${token} == "-v" || "${token}" == "--verbose" ]]; then
pkgtools::msg_using_verbose
elif [[ ${token} == "-W" || "${token}" == "--no-warning" ]]; then
pkgtools::msg_not_using_warning
elif [[ ${token} == "-q" || "${token}" == "--quiet" ]]; then
pkgtools::msg_using_quiet
export PKGTOOLS_MSG_QUIET=1
elif [[ ${token} == "-g" || "${token}" == "--gist" ]]; then
create_gist=true
elif [[ ${token} == "-p" || "${token}" == "--public" ]]; then
privacy="--no-private"
elif [[ ${token} == "-o" || "${token}" == "--open-browser" ]]; then
open=true
elif [[ "${token}" == "--no-git-footer" ]]; then
no_git_footer=true
elif [[ ${token} == "--width" ]]; then
shift 1
width="$1"
elif [[ ${token} == "--template" ]]; then
shift 1
template="$1"
elif [[ ${token} == "--icon" ]]; then
shift 1
icon="$1"
else
opts+="${token} "
fi
else
notebooks+=(${token})
fi
shift 1
done
if [[ -z $notebooks ]]; then
pkgtools::msg_error "Missing notebook files!"
return 1
fi
for notebook in ${notebooks[@]}; do
pkgtools::msg_info "Converting ipynb to html..."
jupyter nbconvert ${notebook} $(echo ${opts}) --to html --template ${template}
if pkgtools::last_command_fails; then
pkgtools::msg_error "Conversion ipynb to html fails!"
break
fi
html_file=${notebook%.ipynb}.html
sed -i -e "s/¶/#/" -e "s/f085/"${icon}"/" ${html_file}
sed -i -e "s/\(<div.*width:\) \(50%\)\(;.*\)/\1 "${width}"\3/" ${html_file}
footer="last update $(date +"%Y-%m-%d - %X %Z")"
url=$(git config --get remote.origin.url | sed -e 's#git@\(.*\):#https://\1/#' -e 's#\.git##')
if [[ -n $url ]] && ! ${no_git_footer}; then
icon="fa-brands fa-git"
if [[ "$url" =~ "gitlab" ]]; then
icon="fa-brands fa-gitlab"
elif [[ "$url" =~ "ligo" ]]; then
icon="fa-brands fa-gitlab"
elif [[ "$url" =~ "github" ]]; then
icon="fa-brands fa-github-alt"
fi
footer=$(LC_MESSAGES=en git --no-pager log -1 HEAD --date=short --pretty=format:"<i class=\"${icon}\"></i> version control - commit <a href=\"$url/commit/%H\">%h</a> - %ad")
fi
sed -i -e "s#@footer@#"${footer}"#" ${html_file}
if ${create_gist}; then
updated=false
# Start notebook file with _ to make it appear first in gist
cp ${notebook} "/tmp/_${notebook}"
notebook="/tmp/_${notebook}"
desc=$(sed -nE 's/^<title>(.*)<\/title>.*$/\1/p' ${html_file})
while read -r line; do
gist_html=$(echo $line | cut -d ' ' -f 1)
gist_id=$(echo ${gist_html} | rev | cut -d '/' -f 1 | rev)
gist_desc=$(echo $line | cut -d ' ' -f 2- | sed 's/(secret)//')
gist_link=$(echo ${gist_desc} | cut -d ' ' -f 2- | sed 's/.*\(https:.*\)/\1/')
gist_desc=$(echo ${gist_desc} | sed 's/https:.*//;s/[ \t]*$//')
if [[ ${gist_desc} == ${desc} ]]; then
pkgtools::msg_info "Updating gist with id '${gist_id}'"
gist_html=$(gist -u ${gist_id} ${notebook} ${html_file})
updated=true
break
fi
done <<< $(gist -l)
if ! ${updated}; then
pkgtools::msg_info "Creating new gist..."
gist_id=$(gist ${privacy} -d ${desc} ${notebook} ${html_file} | rev | cut -d '/' -f 1 | rev)
gist_html="https://htmlpreview.github.io/?https://gist.githubusercontent.com/xgarrido/${gist_id}/raw/${html_file}"
tiny_url=""
while [[ ${tiny_url} != http* ]]; do
tiny_url=$(curl -s "http://tinyurl.com/api-create.php?url=${gist_html}")
done
gist_html=$(gist -u ${gist_id} -d "${desc} ${tiny_url}" ${notebook} ${html_file})
pkgtools::msg_info "Interactive view @ ${tiny_url}"
fi
if ${open}; then
xdg-open ${gist_html}
fi
elif ${open}; then
xdg-open ${html_file}
fi
done
}
# Connect completion system
compdef _nbconvert nbconvert
function _nbconvert ()
{
local ret=1 state
_arguments -C \
{-v,--verbose}'[verbose output]' \
{-h,--help}'[print help message]' \
{-o,--open-browser}'[open browser after conversion]' \
{-g,--gist}'[upload/update the gist]' \
{-p,--public}'[make the gist public]' \
'--width[set the document width]' \
'--template[set the nbconvert template]:template:->template' \
"*:filename: _alternative 'files:file:_files -g \"*.ipynb\"'" && ret=0
case $state in
template)
local -a _templates
_templates=(
cnu-voila:'CNU template'
act:'ACT template'
the-lab:'Default theme'
)
_describe -t templates 'template' _templates && ret=0
;;
esac
return ret
}
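A couple of illustrative invocations; the notebook name is a placeholder:
# Plain conversion to HTML with the default template
nbconvert analysis.ipynb
# Convert, publish as a public gist and open the HTML preview in the browser
nbconvert -g -p -o --template the-lab analysis.ipynb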